acryl-datahub-cloud 0.3.8.3rc1__py3-none-any.whl → 0.3.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of acryl-datahub-cloud might be problematic.

Files changed (79)
  1. acryl_datahub_cloud/_codegen_config.json +1 -1
  2. acryl_datahub_cloud/acryl_cs_issues/acryl_customer.py +1 -1
  3. acryl_datahub_cloud/action_request/__init__.py +0 -0
  4. acryl_datahub_cloud/action_request/action_request_owner_source.py +174 -0
  5. acryl_datahub_cloud/api/__init__.py +1 -1
  6. acryl_datahub_cloud/api/client.py +2 -2
  7. acryl_datahub_cloud/datahub_reporting/datahub_dataset.py +6 -6
  8. acryl_datahub_cloud/datahub_reporting/datahub_form_reporting.py +69 -35
  9. acryl_datahub_cloud/datahub_reporting/extract_sql.py +4 -4
  10. acryl_datahub_cloud/datahub_usage_reporting/usage_feature_patch_builder.py +21 -21
  11. acryl_datahub_cloud/datahub_usage_reporting/usage_feature_reporter.py +14 -13
  12. acryl_datahub_cloud/metadata/_urns/urn_defs.py +1130 -484
  13. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/actionrequest/__init__.py +6 -0
  14. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/dataplatforminstance/__init__.py +2 -0
  15. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/dataset/__init__.py +2 -0
  16. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/executorglobalconfig/__init__.py +15 -0
  17. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/executorpool/__init__.py +4 -0
  18. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py +4 -0
  19. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metric/__init__.py +29 -0
  20. acryl_datahub_cloud/metadata/schema.avsc +839 -49
  21. acryl_datahub_cloud/metadata/schema_classes.py +1286 -63
  22. acryl_datahub_cloud/metadata/schemas/ActionRequestInfo.avsc +422 -12
  23. acryl_datahub_cloud/metadata/schemas/ActionRequestStatus.avsc +12 -0
  24. acryl_datahub_cloud/metadata/schemas/AssertionAnalyticsRunEvent.avsc +5 -3
  25. acryl_datahub_cloud/metadata/schemas/AssertionInfo.avsc +5 -3
  26. acryl_datahub_cloud/metadata/schemas/AssertionRunEvent.avsc +5 -3
  27. acryl_datahub_cloud/metadata/schemas/BusinessAttributeInfo.avsc +6 -2
  28. acryl_datahub_cloud/metadata/schemas/BusinessAttributes.avsc +6 -0
  29. acryl_datahub_cloud/metadata/schemas/ChartInfo.avsc +1 -0
  30. acryl_datahub_cloud/metadata/schemas/ChartKey.avsc +3 -3
  31. acryl_datahub_cloud/metadata/schemas/ContainerKey.avsc +1 -1
  32. acryl_datahub_cloud/metadata/schemas/CorpGroupKey.avsc +1 -1
  33. acryl_datahub_cloud/metadata/schemas/DashboardKey.avsc +3 -3
  34. acryl_datahub_cloud/metadata/schemas/DataFlowKey.avsc +1 -1
  35. acryl_datahub_cloud/metadata/schemas/DataHubActionInfo.avsc +1 -1
  36. acryl_datahub_cloud/metadata/schemas/DataHubConnectionKey.avsc +2 -1
  37. acryl_datahub_cloud/metadata/schemas/DataHubIngestionSourceInfo.avsc +9 -4
  38. acryl_datahub_cloud/metadata/schemas/DataHubMetricCubeDefinition.avsc +185 -0
  39. acryl_datahub_cloud/metadata/schemas/DataHubMetricCubeEvent.avsc +184 -0
  40. acryl_datahub_cloud/metadata/schemas/DataHubMetricCubeKey.avsc +22 -0
  41. acryl_datahub_cloud/metadata/schemas/DataJobKey.avsc +4 -4
  42. acryl_datahub_cloud/metadata/schemas/DataPlatformInstanceKey.avsc +2 -1
  43. acryl_datahub_cloud/metadata/schemas/DataProcessInstanceInput.avsc +132 -2
  44. acryl_datahub_cloud/metadata/schemas/DataProcessInstanceOutput.avsc +131 -1
  45. acryl_datahub_cloud/metadata/schemas/DatasetKey.avsc +14 -13
  46. acryl_datahub_cloud/metadata/schemas/EditableSchemaMetadata.avsc +6 -2
  47. acryl_datahub_cloud/metadata/schemas/ExecutionRequestInput.avsc +6 -1
  48. acryl_datahub_cloud/metadata/schemas/ExecutionRequestSignal.avsc +1 -1
  49. acryl_datahub_cloud/metadata/schemas/FormInfo.avsc +5 -0
  50. acryl_datahub_cloud/metadata/schemas/GlossaryTerms.avsc +3 -1
  51. acryl_datahub_cloud/metadata/schemas/IcebergCatalogInfo.avsc +28 -0
  52. acryl_datahub_cloud/metadata/schemas/IcebergWarehouseInfo.avsc +96 -0
  53. acryl_datahub_cloud/metadata/schemas/IncidentActivityEvent.avsc +4 -1
  54. acryl_datahub_cloud/metadata/schemas/IncidentInfo.avsc +4 -1
  55. acryl_datahub_cloud/metadata/schemas/InputFields.avsc +3 -1
  56. acryl_datahub_cloud/metadata/schemas/MLFeatureKey.avsc +1 -1
  57. acryl_datahub_cloud/metadata/schemas/MLFeatureTableKey.avsc +1 -1
  58. acryl_datahub_cloud/metadata/schemas/MLModelGroupKey.avsc +1 -1
  59. acryl_datahub_cloud/metadata/schemas/MLModelKey.avsc +3 -3
  60. acryl_datahub_cloud/metadata/schemas/MLPrimaryKeyKey.avsc +1 -1
  61. acryl_datahub_cloud/metadata/schemas/MetadataChangeEvent.avsc +399 -176
  62. acryl_datahub_cloud/metadata/schemas/MonitorInfo.avsc +6 -4
  63. acryl_datahub_cloud/metadata/schemas/NotebookKey.avsc +1 -1
  64. acryl_datahub_cloud/metadata/schemas/Operation.avsc +4 -2
  65. acryl_datahub_cloud/metadata/schemas/RemoteExecutorGlobalConfigKey.avsc +21 -0
  66. acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolGlobalConfig.avsc +16 -0
  67. acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolInfo.avsc +85 -0
  68. acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolKey.avsc +1 -1
  69. acryl_datahub_cloud/metadata/schemas/RemoteExecutorStatus.avsc +5 -5
  70. acryl_datahub_cloud/metadata/schemas/SchemaFieldKey.avsc +2 -2
  71. acryl_datahub_cloud/metadata/schemas/SchemaMetadata.avsc +3 -1
  72. acryl_datahub_cloud/metadata/schemas/VersionProperties.avsc +18 -0
  73. acryl_datahub_cloud/metadata/schemas/VersionSetProperties.avsc +5 -0
  74. {acryl_datahub_cloud-0.3.8.3rc1.dist-info → acryl_datahub_cloud-0.3.9.dist-info}/METADATA +35 -35
  75. {acryl_datahub_cloud-0.3.8.3rc1.dist-info → acryl_datahub_cloud-0.3.9.dist-info}/RECORD +78 -68
  76. {acryl_datahub_cloud-0.3.8.3rc1.dist-info → acryl_datahub_cloud-0.3.9.dist-info}/entry_points.txt +1 -0
  77. acryl_datahub_cloud/api/entity_versioning.py +0 -167
  78. {acryl_datahub_cloud-0.3.8.3rc1.dist-info → acryl_datahub_cloud-0.3.9.dist-info}/WHEEL +0 -0
  79. {acryl_datahub_cloud-0.3.8.3rc1.dist-info → acryl_datahub_cloud-0.3.9.dist-info}/top_level.txt +0 -0
@@ -301,7 +301,7 @@ class DataHubActionConfigClass(DictWrapper):
 
  @property
  def executorId(self) -> Union[None, str]:
- """The id of the executor to use to execute the automation. Defaults to 'default' (runs locally)"""
+ """The id of the executor pool to use to execute the automation. Defaults to 'default' (runs locally embedded)"""
  return self._inner_dict.get('executorId') # type: ignore
 
  @executorId.setter
@@ -720,6 +720,7 @@ class ActionRequestInfoClass(_Aspect):
  subResourceType: Union[None, str]=None,
  subResource: Union[None, str]=None,
  params: Union[None, "ActionRequestParamsClass"]=None,
+ description: Union[None, str]=None,
  dueDate: Union[None, int]=None,
  origin: Optional[Union[Union[str, "ActionRequestOriginClass"], None]]=None,
  inferenceMetadata: Union[None, "InferenceMetadataClass"]=None,
@@ -737,6 +738,7 @@ class ActionRequestInfoClass(_Aspect):
  self.params = params
  self.created = created
  self.createdBy = createdBy
+ self.description = description
  self.dueDate = dueDate
  if origin is None:
  # default: 'MANUAL'
@@ -757,6 +759,7 @@ class ActionRequestInfoClass(_Aspect):
  self.params = self.RECORD_SCHEMA.fields_dict["params"].default
  self.created = int()
  self.createdBy = str()
+ self.description = self.RECORD_SCHEMA.fields_dict["description"].default
  self.dueDate = self.RECORD_SCHEMA.fields_dict["dueDate"].default
  self.origin = self.RECORD_SCHEMA.fields_dict["origin"].default
  self.inferenceMetadata = self.RECORD_SCHEMA.fields_dict["inferenceMetadata"].default
@@ -774,7 +777,9 @@ class ActionRequestInfoClass(_Aspect):
 
  @property
  def assignedUsers(self) -> List[str]:
- """The users this action request is assigned to"""
+ """The users this action request is assigned to.
+ By default, action requests are assigned to Dataset Owners
+ and with anyone who has the View Proposals platform privilege."""
  return self._inner_dict.get('assignedUsers') # type: ignore
 
  @assignedUsers.setter
@@ -784,7 +789,9 @@ class ActionRequestInfoClass(_Aspect):
 
  @property
  def assignedGroups(self) -> List[str]:
- """The groups this action request is assigned to"""
+ """The groups this action request is assigned to
+ By default, action requests are assigned to Dataset Owners
+ and with anyone who has the View Proposals platform privilege."""
  return self._inner_dict.get('assignedGroups') # type: ignore
 
  @assignedGroups.setter
@@ -794,7 +801,8 @@ class ActionRequestInfoClass(_Aspect):
 
  @property
  def assignedRoles(self) -> Union[None, List[str]]:
- """The roles this action request is assigned to"""
+ """The roles this action request is assigned to
+ By default, action requests are assigned any roles that have the View Proposals platform privilege."""
  return self._inner_dict.get('assignedRoles') # type: ignore
 
  @assignedRoles.setter
@@ -834,7 +842,8 @@ class ActionRequestInfoClass(_Aspect):
 
  @property
  def subResource(self) -> Union[None, str]:
- """The sub-resource identifier that the action is associated with, for example 'fieldName'"""
+ """The sub-resource identifier that the action is associated with, for example 'fieldName'.
+ Currently, this is only used for Field Paths & schema fields."""
  return self._inner_dict.get('subResource') # type: ignore
 
  @subResource.setter
@@ -872,6 +881,17 @@ class ActionRequestInfoClass(_Aspect):
  self._inner_dict['createdBy'] = value
 
 
+ @property
+ def description(self) -> Union[None, str]:
+ """An optional description that can be added to the action request
+ to explain the intention behind it."""
+ return self._inner_dict.get('description') # type: ignore
+
+ @description.setter
+ def description(self, value: Union[None, str]) -> None:
+ self._inner_dict['description'] = value
+
+
  @property
  def dueDate(self) -> Union[None, int]:
  """The time at which the request is due"""
@@ -921,6 +941,9 @@ class ActionRequestParamsClass(DictWrapper):
  def __init__(self,
  glossaryTermProposal: Union[None, "GlossaryTermProposalClass"]=None,
  tagProposal: Union[None, "TagProposalClass"]=None,
+ domainProposal: Union[None, "DomainProposalClass"]=None,
+ ownerProposal: Union[None, "OwnerProposalClass"]=None,
+ structuredPropertyProposal: Union[None, "StructuredPropertyProposalClass"]=None,
  createGlossaryTermProposal: Union[None, "CreateGlossaryTermProposalClass"]=None,
  createGlossaryNodeProposal: Union[None, "CreateGlossaryNodeProposalClass"]=None,
  updateDescriptionProposal: Union[None, "DescriptionProposalClass"]=None,
@@ -930,6 +953,9 @@ class ActionRequestParamsClass(DictWrapper):
 
  self.glossaryTermProposal = glossaryTermProposal
  self.tagProposal = tagProposal
+ self.domainProposal = domainProposal
+ self.ownerProposal = ownerProposal
+ self.structuredPropertyProposal = structuredPropertyProposal
  self.createGlossaryTermProposal = createGlossaryTermProposal
  self.createGlossaryNodeProposal = createGlossaryNodeProposal
  self.updateDescriptionProposal = updateDescriptionProposal
@@ -938,6 +964,9 @@ class ActionRequestParamsClass(DictWrapper):
  def _restore_defaults(self) -> None:
  self.glossaryTermProposal = self.RECORD_SCHEMA.fields_dict["glossaryTermProposal"].default
  self.tagProposal = self.RECORD_SCHEMA.fields_dict["tagProposal"].default
+ self.domainProposal = self.RECORD_SCHEMA.fields_dict["domainProposal"].default
+ self.ownerProposal = self.RECORD_SCHEMA.fields_dict["ownerProposal"].default
+ self.structuredPropertyProposal = self.RECORD_SCHEMA.fields_dict["structuredPropertyProposal"].default
  self.createGlossaryTermProposal = self.RECORD_SCHEMA.fields_dict["createGlossaryTermProposal"].default
  self.createGlossaryNodeProposal = self.RECORD_SCHEMA.fields_dict["createGlossaryNodeProposal"].default
  self.updateDescriptionProposal = self.RECORD_SCHEMA.fields_dict["updateDescriptionProposal"].default
@@ -946,7 +975,8 @@ class ActionRequestParamsClass(DictWrapper):
 
  @property
  def glossaryTermProposal(self) -> Union[None, "GlossaryTermProposalClass"]:
- """An optional set of information specific to term proposals."""
+ """An optional set of information specific to term proposals.
+ TODO: Add validation that ensures that glossaryTerm or glossaryTerms field is provided, but not both."""
  return self._inner_dict.get('glossaryTermProposal') # type: ignore
 
  @glossaryTermProposal.setter
@@ -956,7 +986,8 @@ class ActionRequestParamsClass(DictWrapper):
 
  @property
  def tagProposal(self) -> Union[None, "TagProposalClass"]:
- """An optional set of information specific to tag proposals."""
+ """An optional set of information specific to tag proposals.
+ TODO: Add validation that ensures that tag or tagUrns field is provided, but not both."""
  return self._inner_dict.get('tagProposal') # type: ignore
 
  @tagProposal.setter
@@ -964,6 +995,36 @@ class ActionRequestParamsClass(DictWrapper):
  self._inner_dict['tagProposal'] = value
 
 
+ @property
+ def domainProposal(self) -> Union[None, "DomainProposalClass"]:
+ """An optional set of information specific to domain proposals."""
+ return self._inner_dict.get('domainProposal') # type: ignore
+
+ @domainProposal.setter
+ def domainProposal(self, value: Union[None, "DomainProposalClass"]) -> None:
+ self._inner_dict['domainProposal'] = value
+
+
+ @property
+ def ownerProposal(self) -> Union[None, "OwnerProposalClass"]:
+ """An optional set of information specific to ownership proposals."""
+ return self._inner_dict.get('ownerProposal') # type: ignore
+
+ @ownerProposal.setter
+ def ownerProposal(self, value: Union[None, "OwnerProposalClass"]) -> None:
+ self._inner_dict['ownerProposal'] = value
+
+
+ @property
+ def structuredPropertyProposal(self) -> Union[None, "StructuredPropertyProposalClass"]:
+ """An optional set of information specific to structured property proposals."""
+ return self._inner_dict.get('structuredPropertyProposal') # type: ignore
+
+ @structuredPropertyProposal.setter
+ def structuredPropertyProposal(self, value: Union[None, "StructuredPropertyProposalClass"]) -> None:
+ self._inner_dict['structuredPropertyProposal'] = value
+
+
  @property
  def createGlossaryTermProposal(self) -> Union[None, "CreateGlossaryTermProposalClass"]:
  """An optional set of information specific to proposals for creating new Glossary Terms."""
@@ -1016,16 +1077,19 @@ class ActionRequestStatusClass(_Aspect):
  status: str,
  lastModified: "AuditStampClass",
  result: Union[None, str]=None,
+ note: Union[None, str]=None,
  ):
  super().__init__()
 
  self.status = status
  self.result = result
+ self.note = note
  self.lastModified = lastModified
 
  def _restore_defaults(self) -> None:
  self.status = str()
  self.result = self.RECORD_SCHEMA.fields_dict["result"].default
+ self.note = self.RECORD_SCHEMA.fields_dict["note"].default
  self.lastModified = AuditStampClass._construct_with_defaults()
 
 
@@ -1049,6 +1113,17 @@ class ActionRequestStatusClass(_Aspect):
  self._inner_dict['result'] = value
 
 
+ @property
+ def note(self) -> Union[None, str]:
+ """Optional note associated with the status.
+ E.g. if the request is rejected, the reason for rejection. If the request is approved, the reason for approval."""
+ return self._inner_dict.get('note') # type: ignore
+
+ @note.setter
+ def note(self, value: Union[None, str]) -> None:
+ self._inner_dict['note'] = value
+
+
  @property
  def lastModified(self) -> "AuditStampClass":
  """Audit stamp containing who last modified the status and when."""
@@ -1265,56 +1340,164 @@ class DescriptionProposalClass(DictWrapper):
  self._inner_dict['description'] = value
 
 
+ class DomainProposalClass(DictWrapper):
+ # No docs available.
+
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.DomainProposal")
+ def __init__(self,
+ domains: List[str],
+ ):
+ super().__init__()
+
+ self.domains = domains
+
+ def _restore_defaults(self) -> None:
+ self.domains = list()
+
+
+ @property
+ def domains(self) -> List[str]:
+ """The urns of the domain(s) being proposed. Currently, only 1 domain is supported per asset.
+ If this changes in the future, this data modeling will suffice."""
+ return self._inner_dict.get('domains') # type: ignore
+
+ @domains.setter
+ def domains(self, value: List[str]) -> None:
+ self._inner_dict['domains'] = value
+
+
  class GlossaryTermProposalClass(DictWrapper):
  # No docs available.
 
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.GlossaryTermProposal")
  def __init__(self,
- glossaryTerm: str,
+ glossaryTerm: Union[None, str]=None,
+ glossaryTerms: Union[None, List[str]]=None,
  ):
  super().__init__()
 
  self.glossaryTerm = glossaryTerm
+ self.glossaryTerms = glossaryTerms
 
  def _restore_defaults(self) -> None:
- self.glossaryTerm = str()
+ self.glossaryTerm = self.RECORD_SCHEMA.fields_dict["glossaryTerm"].default
+ self.glossaryTerms = self.RECORD_SCHEMA.fields_dict["glossaryTerms"].default
 
 
  @property
- def glossaryTerm(self) -> str:
- """The urn of the glossary term being proposed."""
+ def glossaryTerm(self) -> Union[None, str]:
+ """This field is deprecated and will be removed in a future version. Use glossaryTerms instead.
+ The urn of the glossary term being proposed."""
  return self._inner_dict.get('glossaryTerm') # type: ignore
 
  @glossaryTerm.setter
- def glossaryTerm(self, value: str) -> None:
+ def glossaryTerm(self, value: Union[None, str]) -> None:
  self._inner_dict['glossaryTerm'] = value
 
 
+ @property
+ def glossaryTerms(self) -> Union[None, List[str]]:
+ """The urns of the glossary terms being proposed.
+ Use this field over glossaryTerm."""
+ return self._inner_dict.get('glossaryTerms') # type: ignore
+
+ @glossaryTerms.setter
+ def glossaryTerms(self, value: Union[None, List[str]]) -> None:
+ self._inner_dict['glossaryTerms'] = value
+
+
+ class OwnerProposalClass(DictWrapper):
+ # No docs available.
+
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.OwnerProposal")
+ def __init__(self,
+ owners: Union[None, List["OwnerClass"]]=None,
+ ):
+ super().__init__()
+
+ self.owners = owners
+
+ def _restore_defaults(self) -> None:
+ self.owners = self.RECORD_SCHEMA.fields_dict["owners"].default
+
+
+ @property
+ def owners(self) -> Union[None, List["OwnerClass"]]:
+ """The urns of the owner(s) being proposed."""
+ return self._inner_dict.get('owners') # type: ignore
+
+ @owners.setter
+ def owners(self, value: Union[None, List["OwnerClass"]]) -> None:
+ self._inner_dict['owners'] = value
+
+
+ class StructuredPropertyProposalClass(DictWrapper):
+ # No docs available.
+
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.StructuredPropertyProposal")
+ def __init__(self,
+ structuredPropertyValues: Union[None, List["StructuredPropertyValueAssignmentClass"]]=None,
+ ):
+ super().__init__()
+
+ self.structuredPropertyValues = structuredPropertyValues
+
+ def _restore_defaults(self) -> None:
+ self.structuredPropertyValues = self.RECORD_SCHEMA.fields_dict["structuredPropertyValues"].default
+
+
+ @property
+ def structuredPropertyValues(self) -> Union[None, List["StructuredPropertyValueAssignmentClass"]]:
+ """The urns of the domain(s) being proposed. Currently, only 1 domain is supported per asset.
+ If this changes in the future, the data model will be ready.
+ TODO: Decide if indexing the value would also be useful."""
+ return self._inner_dict.get('structuredPropertyValues') # type: ignore
+
+ @structuredPropertyValues.setter
+ def structuredPropertyValues(self, value: Union[None, List["StructuredPropertyValueAssignmentClass"]]) -> None:
+ self._inner_dict['structuredPropertyValues'] = value
+
+
  class TagProposalClass(DictWrapper):
  # No docs available.
 
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.TagProposal")
  def __init__(self,
- tag: str,
+ tag: Union[None, str]=None,
+ tags: Union[None, List[str]]=None,
  ):
  super().__init__()
 
  self.tag = tag
+ self.tags = tags
 
  def _restore_defaults(self) -> None:
- self.tag = str()
+ self.tag = self.RECORD_SCHEMA.fields_dict["tag"].default
+ self.tags = self.RECORD_SCHEMA.fields_dict["tags"].default
 
 
  @property
- def tag(self) -> str:
- """The urn of the tag being proposed."""
+ def tag(self) -> Union[None, str]:
+ """This field is deprecated and will be removed in a future version. Use tags instead.
+ The urn of the tag being proposed."""
  return self._inner_dict.get('tag') # type: ignore
 
  @tag.setter
- def tag(self, value: str) -> None:
+ def tag(self, value: Union[None, str]) -> None:
  self._inner_dict['tag'] = value
 
 
+ @property
+ def tags(self) -> Union[None, List[str]]:
+ """The urns of the glossary terms being proposed.
+ Use this field over glossaryTerm."""
+ return self._inner_dict.get('tags') # type: ignore
+
+ @tags.setter
+ def tags(self, value: Union[None, List[str]]) -> None:
+ self._inner_dict['tags'] = value
+
+
  class AiInferenceMetadataClass(_Aspect):
  """AI Inference Metadata of various types."""
 
@@ -3296,13 +3479,15 @@ class AssertionSourceTypeClass(object):
  # No docs available.
 
  NATIVE = "NATIVE"
- """The assertion was defined natively on DataHub by a user."""
+ """The assertion was defined natively on DataHub by a user.
+ DataHub Cloud only"""
 
  EXTERNAL = "EXTERNAL"
  """The assertion was defined and managed externally of DataHub."""
 
  INFERRED = "INFERRED"
- """The assertion was inferred, e.g. from offline AI / ML models."""
+ """The assertion was inferred, e.g. from offline AI / ML models.
+ DataHub Cloud only"""
 
 
 
@@ -8759,7 +8944,7 @@ class OperationTypeClass(object):
  """Asset was dropped"""
 
  CUSTOM = "CUSTOM"
- """Custom asset operation"""
+ """Custom asset operation. If this is set, ensure customOperationType is filled out."""
 
  UNKNOWN = "UNKNOWN"
 
@@ -9866,6 +10051,7 @@ class VersionPropertiesClass(_Aspect):
  sortId: str,
  aliases: Optional[List["VersionTagClass"]]=None,
  comment: Union[None, str]=None,
+ versioningScheme: Optional[Union[str, "VersioningSchemeClass"]]=None,
  sourceCreatedTimestamp: Union[None, "AuditStampClass"]=None,
  metadataCreatedTimestamp: Union[None, "AuditStampClass"]=None,
  isLatest: Union[None, bool]=None,
@@ -9881,6 +10067,11 @@ class VersionPropertiesClass(_Aspect):
  self.aliases = aliases
  self.comment = comment
  self.sortId = sortId
+ if versioningScheme is None:
+ # default: 'LEXICOGRAPHIC_STRING'
+ self.versioningScheme = self.RECORD_SCHEMA.fields_dict["versioningScheme"].default
+ else:
+ self.versioningScheme = versioningScheme
  self.sourceCreatedTimestamp = sourceCreatedTimestamp
  self.metadataCreatedTimestamp = metadataCreatedTimestamp
  self.isLatest = isLatest
@@ -9891,6 +10082,7 @@ class VersionPropertiesClass(_Aspect):
  self.aliases = list()
  self.comment = self.RECORD_SCHEMA.fields_dict["comment"].default
  self.sortId = str()
+ self.versioningScheme = self.RECORD_SCHEMA.fields_dict["versioningScheme"].default
  self.sourceCreatedTimestamp = self.RECORD_SCHEMA.fields_dict["sourceCreatedTimestamp"].default
  self.metadataCreatedTimestamp = self.RECORD_SCHEMA.fields_dict["metadataCreatedTimestamp"].default
  self.isLatest = self.RECORD_SCHEMA.fields_dict["isLatest"].default
@@ -9947,6 +10139,17 @@ class VersionPropertiesClass(_Aspect):
  self._inner_dict['sortId'] = value
 
 
+ @property
+ def versioningScheme(self) -> Union[str, "VersioningSchemeClass"]:
+ """What versioning scheme `sortId` belongs to.
+ Defaults to a plain string that is lexicographically sorted."""
+ return self._inner_dict.get('versioningScheme') # type: ignore
+
+ @versioningScheme.setter
+ def versioningScheme(self, value: Union[str, "VersioningSchemeClass"]) -> None:
+ self._inner_dict['versioningScheme'] = value
+
+
  @property
  def sourceCreatedTimestamp(self) -> Union[None, "AuditStampClass"]:
  """Timestamp reflecting when this asset version was created in the source system."""
@@ -13201,6 +13404,113 @@ class DataPlatformInstancePropertiesClass(_Aspect):
  self._inner_dict['description'] = value
 
 
+ class IcebergWarehouseInfoClass(_Aspect):
+ """An Iceberg warehouse location and credentails whose read/writes are governed by datahub catalog."""
+
+
+ ASPECT_NAME = 'icebergWarehouseInfo'
+ ASPECT_INFO = {}
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.dataplatforminstance.IcebergWarehouseInfo")
+
+ def __init__(self,
+ dataRoot: str,
+ clientId: str,
+ clientSecret: str,
+ region: str,
+ env: Union[str, "FabricTypeClass"],
+ role: Union[None, str]=None,
+ tempCredentialExpirationSeconds: Union[None, int]=None,
+ ):
+ super().__init__()
+
+ self.dataRoot = dataRoot
+ self.clientId = clientId
+ self.clientSecret = clientSecret
+ self.region = region
+ self.role = role
+ self.tempCredentialExpirationSeconds = tempCredentialExpirationSeconds
+ self.env = env
+
+ def _restore_defaults(self) -> None:
+ self.dataRoot = str()
+ self.clientId = str()
+ self.clientSecret = str()
+ self.region = str()
+ self.role = self.RECORD_SCHEMA.fields_dict["role"].default
+ self.tempCredentialExpirationSeconds = self.RECORD_SCHEMA.fields_dict["tempCredentialExpirationSeconds"].default
+ self.env = FabricTypeClass.DEV
+
+
+ @property
+ def dataRoot(self) -> str:
+ """Path of the root for the backing store of the tables in the warehouse."""
+ return self._inner_dict.get('dataRoot') # type: ignore
+
+ @dataRoot.setter
+ def dataRoot(self, value: str) -> None:
+ self._inner_dict['dataRoot'] = value
+
+
+ @property
+ def clientId(self) -> str:
+ """clientId to be used to authenticate with storage hosting this warehouse"""
+ return self._inner_dict.get('clientId') # type: ignore
+
+ @clientId.setter
+ def clientId(self, value: str) -> None:
+ self._inner_dict['clientId'] = value
+
+
+ @property
+ def clientSecret(self) -> str:
+ """client secret to authenticate with storage hosting this warehouse"""
+ return self._inner_dict.get('clientSecret') # type: ignore
+
+ @clientSecret.setter
+ def clientSecret(self, value: str) -> None:
+ self._inner_dict['clientSecret'] = value
+
+
+ @property
+ def region(self) -> str:
+ """region where the warehouse is located."""
+ return self._inner_dict.get('region') # type: ignore
+
+ @region.setter
+ def region(self, value: str) -> None:
+ self._inner_dict['region'] = value
+
+
+ @property
+ def role(self) -> Union[None, str]:
+ # No docs available.
+ return self._inner_dict.get('role') # type: ignore
+
+ @role.setter
+ def role(self, value: Union[None, str]) -> None:
+ self._inner_dict['role'] = value
+
+
+ @property
+ def tempCredentialExpirationSeconds(self) -> Union[None, int]:
+ # No docs available.
+ return self._inner_dict.get('tempCredentialExpirationSeconds') # type: ignore
+
+ @tempCredentialExpirationSeconds.setter
+ def tempCredentialExpirationSeconds(self, value: Union[None, int]) -> None:
+ self._inner_dict['tempCredentialExpirationSeconds'] = value
+
+
+ @property
+ def env(self) -> Union[str, "FabricTypeClass"]:
+ # No docs available.
+ return self._inner_dict.get('env') # type: ignore
+
+ @env.setter
+ def env(self, value: Union[str, "FabricTypeClass"]) -> None:
+ self._inner_dict['env'] = value
+
+
  class DataProcessInfoClass(_Aspect):
  """The inputs and outputs of this data process"""
 
@@ -13253,18 +13563,21 @@ class DataProcessInstanceInputClass(_Aspect):
 
  def __init__(self,
  inputs: List[str],
+ inputEdges: Union[None, List["EdgeClass"]]=None,
  ):
  super().__init__()
 
  self.inputs = inputs
+ self.inputEdges = inputEdges
 
  def _restore_defaults(self) -> None:
  self.inputs = list()
+ self.inputEdges = self.RECORD_SCHEMA.fields_dict["inputEdges"].default
 
 
  @property
  def inputs(self) -> List[str]:
- """Input datasets to be consumed"""
+ """Input assets consumed"""
  return self._inner_dict.get('inputs') # type: ignore
 
  @inputs.setter
@@ -13272,6 +13585,18 @@ class DataProcessInstanceInputClass(_Aspect):
  self._inner_dict['inputs'] = value
 
 
+ @property
+ def inputEdges(self) -> Union[None, List["EdgeClass"]]:
+ """Input assets consumed by the data process instance, with additional metadata.
+ Counts as lineage.
+ Will eventually deprecate the inputs field."""
+ return self._inner_dict.get('inputEdges') # type: ignore
+
+ @inputEdges.setter
+ def inputEdges(self, value: Union[None, List["EdgeClass"]]) -> None:
+ self._inner_dict['inputEdges'] = value
+
+
  class DataProcessInstanceOutputClass(_Aspect):
  """Information about the outputs of a Data process"""
 
@@ -13282,18 +13607,21 @@ class DataProcessInstanceOutputClass(_Aspect):
 
  def __init__(self,
  outputs: List[str],
+ outputEdges: Union[None, List["EdgeClass"]]=None,
  ):
  super().__init__()
 
  self.outputs = outputs
+ self.outputEdges = outputEdges
 
  def _restore_defaults(self) -> None:
  self.outputs = list()
+ self.outputEdges = self.RECORD_SCHEMA.fields_dict["outputEdges"].default
 
 
  @property
  def outputs(self) -> List[str]:
- """Output datasets to be produced"""
+ """Output assets produced"""
  return self._inner_dict.get('outputs') # type: ignore
 
  @outputs.setter
@@ -13301,6 +13629,18 @@ class DataProcessInstanceOutputClass(_Aspect):
  self._inner_dict['outputs'] = value
 
 
+ @property
+ def outputEdges(self) -> Union[None, List["EdgeClass"]]:
+ """Output assets produced by the data process instance during processing, with additional metadata.
+ Counts as lineage.
+ Will eventually deprecate the outputs field."""
+ return self._inner_dict.get('outputEdges') # type: ignore
+
+ @outputEdges.setter
+ def outputEdges(self, value: Union[None, List["EdgeClass"]]) -> None:
+ self._inner_dict['outputEdges'] = value
+
+
  class DataProcessInstancePropertiesClass(_Aspect):
  """The inputs and outputs of this data process"""
 
@@ -15186,6 +15526,49 @@ class HistogramClass(DictWrapper):
  self._inner_dict['heights'] = value
 
 
+ class IcebergCatalogInfoClass(_Aspect):
+ """Iceberg Catalog metadata associated with an Iceberg table/view"""
+
+
+ ASPECT_NAME = 'icebergCatalogInfo'
+ ASPECT_INFO = {}
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.dataset.IcebergCatalogInfo")
+
+ def __init__(self,
+ metadataPointer: Union[None, str]=None,
+ view: Union[None, bool]=None,
+ ):
+ super().__init__()
+
+ self.metadataPointer = metadataPointer
+ self.view = view
+
+ def _restore_defaults(self) -> None:
+ self.metadataPointer = self.RECORD_SCHEMA.fields_dict["metadataPointer"].default
+ self.view = self.RECORD_SCHEMA.fields_dict["view"].default
+
+
+ @property
+ def metadataPointer(self) -> Union[None, str]:
+ """When Datahub is the REST Catalog for an Iceberg Table, stores the current metadata pointer.
+ If the Iceberg table is managed by an external catalog, the metadata pointer is not set."""
+ return self._inner_dict.get('metadataPointer') # type: ignore
+
+ @metadataPointer.setter
+ def metadataPointer(self, value: Union[None, str]) -> None:
+ self._inner_dict['metadataPointer'] = value
+
+
+ @property
+ def view(self) -> Union[None, bool]:
+ # No docs available.
+ return self._inner_dict.get('view') # type: ignore
+
+ @view.setter
+ def view(self, value: Union[None, bool]) -> None:
+ self._inner_dict['view'] = value
+
+
  class PartitionSummaryClass(DictWrapper):
  """Defines how the data is partitioned"""
 
@@ -16969,7 +17352,7 @@ class ExecutionRequestInputClass(_Aspect):
 
  @property
  def executorId(self) -> str:
- """Advanced: specify a specific executor to route the request to. If none is provided, a "default" executor is used."""
+ """Advanced: specify a specific executor pool to route the request to. If none is provided, a "default" embedded executor is used."""
  return self._inner_dict.get('executorId') # type: ignore
 
  @executorId.setter
@@ -17148,7 +17531,7 @@ class ExecutionRequestSignalClass(_Aspect):
 
  @property
  def executorId(self) -> Union[None, str]:
- """Advanced: specify a specific executor to route the request to. If none is provided, a "default" executor is used."""
+ """Advanced: specify a specific executor pool to route the request to. If none is provided, a default embedded executor is used."""
  return self._inner_dict.get('executorId') # type: ignore
 
  @executorId.setter
@@ -17279,7 +17662,7 @@ class RemoteExecutorStatusClass(_Aspect):
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.executor.RemoteExecutorStatus")
 
  def __init__(self,
- poolName: str,
+ executorPoolId: str,
  executorReleaseVersion: str,
  executorAddress: str,
  executorHostname: str,
@@ -17293,7 +17676,7 @@ class RemoteExecutorStatusClass(_Aspect):
  ):
  super().__init__()
 
- self.poolName = poolName
+ self.executorPoolId = executorPoolId
  self.executorReleaseVersion = executorReleaseVersion
  self.executorAddress = executorAddress
  self.executorHostname = executorHostname
@@ -17326,7 +17709,7 @@ class RemoteExecutorStatusClass(_Aspect):
  self.reportedAt = reportedAt
 
  def _restore_defaults(self) -> None:
- self.poolName = str()
+ self.executorPoolId = str()
  self.executorReleaseVersion = str()
  self.executorAddress = str()
  self.executorHostname = str()
@@ -17340,13 +17723,13 @@ class RemoteExecutorStatusClass(_Aspect):
 
 
  @property
- def poolName(self) -> str:
- """References the 'name' defined in RemoteExecutorPoolKey"""
- return self._inner_dict.get('poolName') # type: ignore
+ def executorPoolId(self) -> str:
+ """References the 'id' defined in RemoteExecutorPoolKey"""
+ return self._inner_dict.get('executorPoolId') # type: ignore
 
- @poolName.setter
- def poolName(self, value: str) -> None:
- self._inner_dict['poolName'] = value
+ @executorPoolId.setter
+ def executorPoolId(self, value: str) -> None:
+ self._inner_dict['executorPoolId'] = value
 
 
  @property
@@ -17391,7 +17774,8 @@ class RemoteExecutorStatusClass(_Aspect):
 
  @property
  def executorExpired(self) -> bool:
- """Flag indicating whether remote executor status record is stale."""
+ """Flag indicating whether remote executor status record is stale.
+ This means executor no longer sends heartbeats and considered dead."""
  return self._inner_dict.get('executorExpired') # type: ignore
 
  @executorExpired.setter
@@ -17401,7 +17785,7 @@ class RemoteExecutorStatusClass(_Aspect):
 
  @property
  def executorStopped(self) -> bool:
- """Flag indicating whether remote executor is stopped."""
+ """Flag indicating whether remote executor is stopped, and properly reported its termination."""
  return self._inner_dict.get('executorStopped') # type: ignore
 
  @executorStopped.setter
@@ -17411,7 +17795,7 @@ class RemoteExecutorStatusClass(_Aspect):
 
  @property
  def executorEmbedded(self) -> bool:
- """Flag indicating whether remote executor is embedded executor"""
+ """Flag indicating whether remote executor is embedded into the coordinator pod"""
  return self._inner_dict.get('executorEmbedded') # type: ignore
 
  @executorEmbedded.setter
@@ -17450,6 +17834,36 @@ class RemoteExecutorStatusClass(_Aspect):
  self._inner_dict['reportedAt'] = value
 
 
+ class RemoteExecutorPoolGlobalConfigClass(_Aspect):
+ """Global singleton storing configs for remote executor pools."""
+
+
+ ASPECT_NAME = 'dataHubRemoteExecutorPoolGlobalConfig'
+ ASPECT_INFO = {}
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.executorglobalconfig.RemoteExecutorPoolGlobalConfig")
+
+ def __init__(self,
+ defaultExecutorPoolId: str,
+ ):
+ super().__init__()
+
+ self.defaultExecutorPoolId = defaultExecutorPoolId
+
+ def _restore_defaults(self) -> None:
+ self.defaultExecutorPoolId = str()
+
+
+ @property
+ def defaultExecutorPoolId(self) -> str:
+ """The default pool to use for tasks that require remote executors.
+ References 'id' in com.linkedin.pegasus2avro.metadata.key.RemoteExecutorPoolKey"""
+ return self._inner_dict.get('defaultExecutorPoolId') # type: ignore
+
+ @defaultExecutorPoolId.setter
+ def defaultExecutorPoolId(self, value: str) -> None:
+ self._inner_dict['defaultExecutorPoolId'] = value
+
+
  class RemoteExecutorPoolInfoClass(_Aspect):
  # No docs available.
 
@@ -17460,13 +17874,28 @@ class RemoteExecutorPoolInfoClass(_Aspect):
 
  def __init__(self,
  createdAt: int,
+ creator: Union[None, str]=None,
+ description: Union[None, str]=None,
+ queueUrl: Union[None, str]=None,
+ isEmbedded: Union[None, bool]=None,
+ state: Union[None, "RemoteExecutorPoolStateClass"]=None,
  ):
  super().__init__()
 
  self.createdAt = createdAt
+ self.creator = creator
+ self.description = description
+ self.queueUrl = queueUrl
+ self.isEmbedded = isEmbedded
+ self.state = state
 
  def _restore_defaults(self) -> None:
  self.createdAt = int()
+ self.creator = self.RECORD_SCHEMA.fields_dict["creator"].default
+ self.description = self.RECORD_SCHEMA.fields_dict["description"].default
+ self.queueUrl = self.RECORD_SCHEMA.fields_dict["queueUrl"].default
+ self.isEmbedded = self.RECORD_SCHEMA.fields_dict["isEmbedded"].default
+ self.state = self.RECORD_SCHEMA.fields_dict["state"].default
 
 
  @property
@@ -17479,6 +17908,111 @@ class RemoteExecutorPoolInfoClass(_Aspect):
  self._inner_dict['createdAt'] = value
 
 
+ @property
+ def creator(self) -> Union[None, str]:
+ """The creator of this pool"""
+ return self._inner_dict.get('creator') # type: ignore
+
+ @creator.setter
+ def creator(self, value: Union[None, str]) -> None:
+ self._inner_dict['creator'] = value
+
+
+ @property
+ def description(self) -> Union[None, str]:
+ """A description for this pool"""
+ return self._inner_dict.get('description') # type: ignore
+
+ @description.setter
+ def description(self, value: Union[None, str]) -> None:
+ self._inner_dict['description'] = value
+
+
+ @property
+ def queueUrl(self) -> Union[None, str]:
+ """The url to the task queue for this pool. I.e., SQS queue url."""
+ return self._inner_dict.get('queueUrl') # type: ignore
+
+ @queueUrl.setter
+ def queueUrl(self, value: Union[None, str]) -> None:
+ self._inner_dict['queueUrl'] = value
+
+
+ @property
+ def isEmbedded(self) -> Union[None, bool]:
+ """Only set true if this is the pool embedded within the DataHub Cloud deployment"""
+ return self._inner_dict.get('isEmbedded') # type: ignore
+
+ @isEmbedded.setter
+ def isEmbedded(self, value: Union[None, bool]) -> None:
+ self._inner_dict['isEmbedded'] = value
+
+
+ @property
+ def state(self) -> Union[None, "RemoteExecutorPoolStateClass"]:
+ """The status of the remote executor pool"""
+ return self._inner_dict.get('state') # type: ignore
+
+ @state.setter
+ def state(self, value: Union[None, "RemoteExecutorPoolStateClass"]) -> None:
+ self._inner_dict['state'] = value
+
+
+ class RemoteExecutorPoolStateClass(DictWrapper):
+ # No docs available.
+
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolState")
+ def __init__(self,
+ status: Union[str, "RemoteExecutorPoolStatusClass"],
+ message: Union[None, str]=None,
+ ):
+ super().__init__()
+
+ self.status = status
+ self.message = message
+
+ def _restore_defaults(self) -> None:
+ self.status = RemoteExecutorPoolStatusClass.PROVISIONING_PENDING
+ self.message = self.RECORD_SCHEMA.fields_dict["message"].default
+
+
+ @property
+ def status(self) -> Union[str, "RemoteExecutorPoolStatusClass"]:
+ """The status of the remote executor pool"""
+ return self._inner_dict.get('status') # type: ignore
+
+ @status.setter
+ def status(self, value: Union[str, "RemoteExecutorPoolStatusClass"]) -> None:
+ self._inner_dict['status'] = value
+
+
+ @property
+ def message(self) -> Union[None, str]:
+ """The message associated with the status. I.e., an error message explaining failure."""
+ return self._inner_dict.get('message') # type: ignore
+
+ @message.setter
+ def message(self, value: Union[None, str]) -> None:
+ self._inner_dict['message'] = value
+
+
+ class RemoteExecutorPoolStatusClass(object):
+ # No docs available.
+
+ PROVISIONING_PENDING = "PROVISIONING_PENDING"
+ """The pool is pending provisioning. Default state on creation."""
+
+ PROVISIONING_IN_PROGRESS = "PROVISIONING_IN_PROGRESS"
+ """The pool has been picked up by DataHub and is in the process of being provisioned."""
+
+ PROVISIONING_FAILED = "PROVISIONING_FAILED"
+ """The pool has failed to be provisioned."""
+
+ READY = "READY"
+ """The pool has been successfully provisioned and is ready to accept tasks."""
+
+
+
  class DomainParamsClass(DictWrapper):
  """Information specific to glossary terms prompts."""
 
@@ -20662,7 +21196,7 @@ class DataHubIngestionSourceConfigClass(DictWrapper):
 
  @property
  def executorId(self) -> Union[None, str]:
- """The id of the executor to use to execute the ingestion run"""
+ """The id of the executor pool to use to execute the ingestion run"""
  return self._inner_dict.get('executorId') # type: ignore
 
  @executorId.setter
@@ -21015,7 +21549,7 @@ class ChartKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'chartKey'
- ASPECT_INFO = {'keyForEntity': 'chart', 'entityCategory': 'core', 'entityAspects': ['chartInfo', 'editableChartProperties', 'chartQuery', 'inputFields', 'usageFeatures', 'lineageFeatures', 'chartUsageStatistics', 'embed', 'proposals', 'browsePaths', 'domains', 'container', 'deprecation', 'ownership', 'status', 'institutionalMemory', 'dataPlatformInstance', 'globalTags', 'glossaryTerms', 'browsePathsV2', 'subTypes', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'share', 'origin', 'documentation']}
+ ASPECT_INFO = {'keyForEntity': 'chart', 'entityCategory': 'core', 'entityAspects': ['chartInfo', 'editableChartProperties', 'chartQuery', 'inputFields', 'chartUsageStatistics', 'embed', 'browsePaths', 'domains', 'container', 'deprecation', 'ownership', 'status', 'institutionalMemory', 'dataPlatformInstance', 'globalTags', 'glossaryTerms', 'browsePathsV2', 'subTypes', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'usageFeatures', 'lineageFeatures', 'proposals', 'share', 'origin', 'documentation']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.ChartKey")
 
  def __init__(self,
@@ -21086,7 +21620,7 @@ class ContainerKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'containerKey'
- ASPECT_INFO = {'keyForEntity': 'container', 'entityCategory': 'core', 'entityAspects': ['containerProperties', 'editableContainerProperties', 'dataPlatformInstance', 'subTypes', 'ownership', 'deprecation', 'container', 'globalTags', 'glossaryTerms', 'institutionalMemory', 'browsePaths', 'status', 'domains', 'proposals', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'access', 'share', 'origin', 'documentation'], 'entityDoc': 'A container of related data assets.'}
+ ASPECT_INFO = {'keyForEntity': 'container', 'entityCategory': 'core', 'entityAspects': ['containerProperties', 'editableContainerProperties', 'dataPlatformInstance', 'subTypes', 'ownership', 'deprecation', 'container', 'globalTags', 'glossaryTerms', 'institutionalMemory', 'browsePaths', 'status', 'domains', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'access', 'proposals', 'share', 'origin', 'documentation'], 'entityDoc': 'A container of related data assets.'}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.ContainerKey")
 
  def __init__(self,
@@ -21115,7 +21649,7 @@ class CorpGroupKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'corpGroupKey'
- ASPECT_INFO = {'keyForEntity': 'corpGroup', 'entityCategory': '_unset_', 'entityAspects': ['corpGroupInfo', 'corpGroupEditableInfo', 'globalTags', 'ownership', 'status', 'origin', 'corpGroupSettings', 'roleMembership', 'structuredProperties', 'forms', 'testResults', 'share'], 'entityDoc': 'CorpGroup represents an identity of a group of users in the enterprise.'}
+ ASPECT_INFO = {'keyForEntity': 'corpGroup', 'entityCategory': '_unset_', 'entityAspects': ['corpGroupInfo', 'corpGroupEditableInfo', 'globalTags', 'ownership', 'status', 'origin', 'roleMembership', 'structuredProperties', 'forms', 'testResults', 'corpGroupSettings', 'share'], 'entityDoc': 'CorpGroup represents an identity of a group of users in the enterprise.'}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.CorpGroupKey")
 
  def __init__(self,
@@ -21173,7 +21707,7 @@ class DashboardKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'dashboardKey'
- ASPECT_INFO = {'keyForEntity': 'dashboard', 'entityCategory': '_unset_', 'entityAspects': ['domains', 'container', 'deprecation', 'dashboardUsageStatistics', 'inputFields', 'usageFeatures', 'lineageFeatures', 'subTypes', 'embed', 'proposals', 'dashboardInfo', 'editableDashboardProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'share', 'origin', 'documentation']}
+ ASPECT_INFO = {'keyForEntity': 'dashboard', 'entityCategory': '_unset_', 'entityAspects': ['domains', 'container', 'deprecation', 'dashboardUsageStatistics', 'inputFields', 'subTypes', 'embed', 'dashboardInfo', 'editableDashboardProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'usageFeatures', 'lineageFeatures', 'proposals', 'share', 'origin', 'documentation']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DashboardKey")
 
  def __init__(self,
@@ -21244,7 +21778,7 @@ class DataFlowKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'dataFlowKey'
- ASPECT_INFO = {'keyForEntity': 'dataFlow', 'entityCategory': 'core', 'entityAspects': ['domains', 'deprecation', 'versionInfo', 'proposals', 'dataFlowInfo', 'editableDataFlowProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'subTypes', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation']}
+ ASPECT_INFO = {'keyForEntity': 'dataFlow', 'entityCategory': 'core', 'entityAspects': ['domains', 'deprecation', 'versionInfo', 'dataFlowInfo', 'editableDataFlowProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'subTypes', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataFlowKey")
 
  def __init__(self,
@@ -21357,7 +21891,7 @@ class DataHubConnectionKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'dataHubConnectionKey'
- ASPECT_INFO = {'keyForEntity': 'dataHubConnection', 'entityCategory': 'internal', 'entityAspects': ['dataHubConnectionDetails', 'dataPlatformInstance']}
+ ASPECT_INFO = {'keyForEntity': 'dataHubConnection', 'entityCategory': 'internal', 'entityAspects': ['dataHubConnectionDetails', 'dataPlatformInstance', 'status']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataHubConnectionKey")
 
  def __init__(self,
@@ -21410,6 +21944,35 @@ class DataHubIngestionSourceKeyClass(_Aspect):
  self._inner_dict['id'] = value
 
 
+ class DataHubMetricCubeKeyClass(_Aspect):
+ """Key for a DataHub Metric Cube, e.g. an internal metric."""
+
+
+ ASPECT_NAME = 'dataHubMetricCubeKey'
+ ASPECT_INFO = {'keyForEntity': 'dataHubMetricCube', 'entityCategory': 'internal', 'entityAspects': ['dataHubMetricCubeDefinition', 'dataHubMetricCubeEvent']}
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataHubMetricCubeKey")
+
+ def __init__(self,
+ id: str,
+ ):
+ super().__init__()
+
+ self.id = id
+
+ def _restore_defaults(self) -> None:
+ self.id = str()
+
+
+ @property
+ def id(self) -> str:
+ """Unique id for the cube type."""
+ return self._inner_dict.get('id') # type: ignore
+
+ @id.setter
+ def id(self, value: str) -> None:
+ self._inner_dict['id'] = value
+
+
  class DataHubPersonaKeyClass(_Aspect):
  """Key for a persona type"""
 
@@ -21660,7 +22223,7 @@ class DataJobKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'dataJobKey'
- ASPECT_INFO = {'keyForEntity': 'dataJob', 'entityCategory': '_unset_', 'entityAspects': ['datahubIngestionRunSummary', 'datahubIngestionCheckpoint', 'domains', 'deprecation', 'versionInfo', 'proposals', 'dataJobInfo', 'dataJobInputOutput', 'editableDataJobProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'forms', 'anomaliesSummary', 'subTypes', 'incidentsSummary', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation', 'dataTransformLogic']}
+ ASPECT_INFO = {'keyForEntity': 'dataJob', 'entityCategory': '_unset_', 'entityAspects': ['datahubIngestionRunSummary', 'datahubIngestionCheckpoint', 'domains', 'deprecation', 'versionInfo', 'dataJobInfo', 'dataJobInputOutput', 'editableDataJobProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'forms', 'subTypes', 'incidentsSummary', 'testResults', 'dataTransformLogic', 'proposals', 'anomaliesSummary', 'share', 'origin', 'lineageFeatures', 'documentation']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataJobKey")
 
  def __init__(self,
@@ -21702,7 +22265,7 @@ class DataPlatformInstanceKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'dataPlatformInstanceKey'
- ASPECT_INFO = {'keyForEntity': 'dataPlatformInstance', 'entityCategory': 'internal', 'entityAspects': ['dataPlatformInstanceProperties', 'ownership', 'globalTags', 'institutionalMemory', 'deprecation', 'status']}
+ ASPECT_INFO = {'keyForEntity': 'dataPlatformInstance', 'entityCategory': 'internal', 'entityAspects': ['dataPlatformInstanceProperties', 'ownership', 'globalTags', 'institutionalMemory', 'deprecation', 'status', 'icebergWarehouseInfo']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataPlatformInstanceKey")
 
  def __init__(self,
@@ -21858,7 +22421,7 @@ class DatasetKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'datasetKey'
- ASPECT_INFO = {'keyForEntity': 'dataset', 'entityCategory': 'core', 'entityAspects': ['viewProperties', 'subTypes', 'datasetProfile', 'datasetUsageStatistics', 'operation', 'domains', 'proposals', 'schemaProposals', 'schemaMetadata', 'status', 'container', 'deprecation', 'usageFeatures', 'storageFeatures', 'lineageFeatures', 'testResults', 'siblings', 'embed', 'incidentsSummary', 'inferredNeighbors', 'inferredMetadata', 'schemaFieldsInferredMetadata', 'schemaFieldsInferredNeighbors', 'assertionsSummary', 'datasetProperties', 'editableDatasetProperties', 'datasetDeprecation', 'datasetUpstreamLineage', 'upstreamLineage', 'institutionalMemory', 'ownership', 'editableSchemaMetadata', 'globalTags', 'glossaryTerms', 'browsePaths', 'dataPlatformInstance', 'browsePathsV2', 'anomaliesSummary', 'access', 'structuredProperties', 'forms', 'partitionsSummary', 'share', 'origin', 'documentation', 'entityInferenceMetadata', 'versionProperties'], 'entityDoc': 'Datasets represent logical or physical data assets stored or represented in various data platforms. Tables, Views, Streams are all instances of datasets.'}
+ ASPECT_INFO = {'keyForEntity': 'dataset', 'entityCategory': 'core', 'entityAspects': ['viewProperties', 'subTypes', 'datasetProfile', 'datasetUsageStatistics', 'operation', 'domains', 'schemaMetadata', 'status', 'container', 'deprecation', 'testResults', 'siblings', 'embed', 'incidentsSummary', 'datasetProperties', 'editableDatasetProperties', 'datasetDeprecation', 'datasetUpstreamLineage', 'upstreamLineage', 'institutionalMemory', 'ownership', 'editableSchemaMetadata', 'globalTags', 'glossaryTerms', 'browsePaths', 'dataPlatformInstance', 'browsePathsV2', 'access', 'structuredProperties', 'forms', 'partitionsSummary', 'versionProperties', 'icebergCatalogInfo', 'inferredNeighbors', 'inferredMetadata', 'schemaFieldsInferredMetadata', 'schemaFieldsInferredNeighbors', 'assertionsSummary', 'usageFeatures', 'storageFeatures', 'lineageFeatures', 'proposals', 'schemaProposals', 'anomaliesSummary', 'share', 'origin', 'documentation', 'entityInferenceMetadata'], 'entityDoc': 'Datasets represent logical or physical data assets stored or represented in various data platforms. Tables, Views, Streams are all instances of datasets.'}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DatasetKey")
 
  def __init__(self,
@@ -22235,7 +22798,7 @@ class MLFeatureKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'mlFeatureKey'
22238
- ASPECT_INFO = {'keyForEntity': 'mlFeature', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureProperties', 'domains', 'proposals', 'mlFeatureProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
22801
+ ASPECT_INFO = {'keyForEntity': 'mlFeature', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureProperties', 'domains', 'mlFeatureProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
22239
22802
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLFeatureKey")
22240
22803
 
22241
22804
  def __init__(self,
@@ -22277,7 +22840,7 @@ class MLFeatureTableKeyClass(_Aspect):
22277
22840
 
22278
22841
 
22279
22842
  ASPECT_NAME = 'mlFeatureTableKey'
22280
- ASPECT_INFO = {'keyForEntity': 'mlFeatureTable', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureTableProperties', 'domains', 'proposals', 'mlFeatureTableProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation']}
22843
+ ASPECT_INFO = {'keyForEntity': 'mlFeatureTable', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureTableProperties', 'domains', 'mlFeatureTableProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
22281
22844
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLFeatureTableKey")
22282
22845
 
22283
22846
  def __init__(self,
@@ -22374,7 +22937,7 @@ class MLModelGroupKeyClass(_Aspect):
22374
22937
 
22375
22938
 
22376
22939
  ASPECT_NAME = 'mlModelGroupKey'
22377
- ASPECT_INFO = {'keyForEntity': 'mlModelGroup', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelGroupProperties', 'domains', 'proposals', 'mlModelGroupProperties', 'ownership', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation']}
22940
+ ASPECT_INFO = {'keyForEntity': 'mlModelGroup', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelGroupProperties', 'domains', 'mlModelGroupProperties', 'ownership', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
22378
22941
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLModelGroupKey")
22379
22942
 
22380
22943
  def __init__(self,
@@ -22429,7 +22992,7 @@ class MLModelKeyClass(_Aspect):
22429
22992
 
22430
22993
 
22431
22994
  ASPECT_NAME = 'mlModelKey'
22432
- ASPECT_INFO = {'keyForEntity': 'mlModel', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelProperties', 'domains', 'proposals', 'ownership', 'mlModelProperties', 'intendedUse', 'mlModelFactorPrompts', 'mlModelMetrics', 'mlModelEvaluationData', 'mlModelTrainingData', 'mlModelQuantitativeAnalyses', 'mlModelEthicalConsiderations', 'mlModelCaveatsAndRecommendations', 'institutionalMemory', 'sourceCode', 'status', 'cost', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary', 'versionProperties']}
22995
+ ASPECT_INFO = {'keyForEntity': 'mlModel', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelProperties', 'domains', 'ownership', 'mlModelProperties', 'intendedUse', 'mlModelFactorPrompts', 'mlModelMetrics', 'mlModelEvaluationData', 'mlModelTrainingData', 'mlModelQuantitativeAnalyses', 'mlModelEthicalConsiderations', 'mlModelCaveatsAndRecommendations', 'institutionalMemory', 'sourceCode', 'status', 'cost', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'versionProperties', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
22433
22996
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLModelKey")
22434
22997
 
22435
22998
  def __init__(self,
@@ -22484,7 +23047,7 @@ class MLPrimaryKeyKeyClass(_Aspect):
22484
23047
 
22485
23048
 
22486
23049
  ASPECT_NAME = 'mlPrimaryKeyKey'
22487
- ASPECT_INFO = {'keyForEntity': 'mlPrimaryKey', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlPrimaryKeyProperties', 'domains', 'proposals', 'mlPrimaryKeyProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'globalTags', 'dataPlatformInstance', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures']}
23050
+ ASPECT_INFO = {'keyForEntity': 'mlPrimaryKey', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlPrimaryKeyProperties', 'domains', 'mlPrimaryKeyProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'globalTags', 'dataPlatformInstance', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures']}
22488
23051
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLPrimaryKeyKey")
22489
23052
 
22490
23053
  def __init__(self,
@@ -22568,7 +23131,7 @@ class NotebookKeyClass(_Aspect):
22568
23131
 
22569
23132
 
22570
23133
  ASPECT_NAME = 'notebookKey'
22571
- ASPECT_INFO = {'keyForEntity': 'notebook', 'entityCategory': '_unset_', 'entityAspects': ['notebookInfo', 'notebookContent', 'editableNotebookProperties', 'ownership', 'status', 'globalTags', 'glossaryTerms', 'browsePaths', 'institutionalMemory', 'domains', 'subTypes', 'dataPlatformInstance', 'proposals', 'browsePathsV2', 'testResults', 'share', 'origin', 'documentation'], 'entityDoc': 'Notebook represents a combination of query, text, chart and etc. This is in BETA version'}
23134
+ ASPECT_INFO = {'keyForEntity': 'notebook', 'entityCategory': '_unset_', 'entityAspects': ['notebookInfo', 'notebookContent', 'editableNotebookProperties', 'ownership', 'status', 'globalTags', 'glossaryTerms', 'browsePaths', 'institutionalMemory', 'domains', 'subTypes', 'dataPlatformInstance', 'browsePathsV2', 'testResults', 'proposals', 'share', 'origin', 'documentation'], 'entityDoc': 'Notebook represents a combination of query, text, chart and etc. This is in BETA version'}
22572
23135
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.NotebookKey")
22573
23136
 
22574
23137
  def __init__(self,
@@ -22735,6 +23298,36 @@ class RecommendationModuleKeyClass(_Aspect):
22735
23298
  self._inner_dict['identifier'] = value
22736
23299
 
22737
23300
 
23301
+ class RemoteExecutorGlobalConfigKeyClass(_Aspect):
23302
+ """Key for the *Singleton* DataHub Remote Executor Global Config"""
23303
+
23304
+
23305
+ ASPECT_NAME = 'dataHubRemoteExecutorGlobalConfigKey'
23306
+ ASPECT_INFO = {'keyForEntity': 'dataHubRemoteExecutorGlobalConfig', 'entityCategory': 'internal', 'entityAspects': ['dataHubRemoteExecutorPoolGlobalConfig']}
23307
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.RemoteExecutorGlobalConfigKey")
23308
+
23309
+ def __init__(self,
23310
+ id: str,
23311
+ ):
23312
+ super().__init__()
23313
+
23314
+ self.id = id
23315
+
23316
+ def _restore_defaults(self) -> None:
23317
+ self.id = str()
23318
+
23319
+
23320
+ @property
23321
+ def id(self) -> str:
23322
+ """The unique identifier for the remote executor global config
23323
+ NOTE: since this is a singleton, there should be a hardcoded key in the AcrylConstants file"""
23324
+ return self._inner_dict.get('id') # type: ignore
23325
+
23326
+ @id.setter
23327
+ def id(self, value: str) -> None:
23328
+ self._inner_dict['id'] = value
23329
+
23330
+
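Editor's note: a sketch of constructing the new singleton global-config key. The literal id below is a placeholder; per the docstring, the real hardcoded value is defined in an AcrylConstants file that is not part of this diff.

from acryl_datahub_cloud.metadata.schema_classes import RemoteExecutorGlobalConfigKeyClass

# "__global__" is a placeholder for the hardcoded singleton id defined elsewhere.
global_config_key = RemoteExecutorGlobalConfigKeyClass(id="__global__")
print(global_config_key.ASPECT_NAME)  # dataHubRemoteExecutorGlobalConfigKey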
22738
23331
  class RemoteExecutorKeyClass(_Aspect):
22739
23332
  """Key for an DataHub Remote Executor"""
22740
23333
 
@@ -22773,24 +23366,24 @@ class RemoteExecutorPoolKeyClass(_Aspect):
22773
23366
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.RemoteExecutorPoolKey")
22774
23367
 
22775
23368
  def __init__(self,
22776
- name: str,
23369
+ id: str,
22777
23370
  ):
22778
23371
  super().__init__()
22779
23372
 
22780
- self.name = name
23373
+ self.id = id
22781
23374
 
22782
23375
  def _restore_defaults(self) -> None:
22783
- self.name = str()
23376
+ self.id = str()
22784
23377
 
22785
23378
 
22786
23379
  @property
22787
- def name(self) -> str:
23380
+ def id(self) -> str:
22788
23381
  """The unique identifier for the remote executor pool"""
22789
- return self._inner_dict.get('name') # type: ignore
23382
+ return self._inner_dict.get('id') # type: ignore
22790
23383
 
22791
- @name.setter
22792
- def name(self, value: str) -> None:
22793
- self._inner_dict['name'] = value
23384
+ @id.setter
23385
+ def id(self, value: str) -> None:
23386
+ self._inner_dict['id'] = value
22794
23387
 
22795
23388
 
22796
23389
  class RoleKeyClass(_Aspect):
@@ -22827,7 +23420,7 @@ class SchemaFieldKeyClass(_Aspect):
22827
23420
 
22828
23421
 
22829
23422
  ASPECT_NAME = 'schemaFieldKey'
22830
- ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults', 'schemaFieldProfile', 'lineageFeatures', 'deprecation']}
23423
+ ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults', 'deprecation', 'schemaFieldProfile', 'lineageFeatures']}
22831
23424
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.SchemaFieldKey")
22832
23425
 
22833
23426
  def __init__(self,
@@ -24859,6 +25452,437 @@ class TagSnapshotClass(DictWrapper):
24859
25452
  self._inner_dict['aspects'] = value
24860
25453
 
24861
25454
 
25455
+ class DataHubMetricCubeDefinitionClass(_Aspect):
25456
+ """The structure of an individual metric cube in DataHub."""
25457
+
25458
+
25459
+ ASPECT_NAME = 'dataHubMetricCubeDefinition'
25460
+ ASPECT_INFO = {}
25461
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeDefinition")
25462
+
25463
+ def __init__(self,
25464
+ name: str,
25465
+ origin: "DataHubMetricCubeOriginClass",
25466
+ type: Optional[str]=None,
25467
+ description: Union[None, str]=None,
25468
+ entity: Union[None, str]=None,
25469
+ measures: Union[None, "DataHubMetricCubeMeasuresClass"]=None,
25470
+ dimensions: Union[None, "DataHubMetricCubeDimensionsClass"]=None,
25471
+ ):
25472
+ super().__init__()
25473
+
25474
+ self.name = name
25475
+ if type is None:
25476
+ # default: 'custom'
25477
+ self.type = self.RECORD_SCHEMA.fields_dict["type"].default
25478
+ else:
25479
+ self.type = type
25480
+ self.description = description
25481
+ self.entity = entity
25482
+ self.origin = origin
25483
+ self.measures = measures
25484
+ self.dimensions = dimensions
25485
+
25486
+ def _restore_defaults(self) -> None:
25487
+ self.name = str()
25488
+ self.type = self.RECORD_SCHEMA.fields_dict["type"].default
25489
+ self.description = self.RECORD_SCHEMA.fields_dict["description"].default
25490
+ self.entity = self.RECORD_SCHEMA.fields_dict["entity"].default
25491
+ self.origin = DataHubMetricCubeOriginClass._construct_with_defaults()
25492
+ self.measures = self.RECORD_SCHEMA.fields_dict["measures"].default
25493
+ self.dimensions = self.RECORD_SCHEMA.fields_dict["dimensions"].default
25494
+
25495
+
25496
+ @property
25497
+ def name(self) -> str:
25498
+ """ Display name of the metric cube"""
25499
+ return self._inner_dict.get('name') # type: ignore
25500
+
25501
+ @name.setter
25502
+ def name(self, value: str) -> None:
25503
+ self._inner_dict['name'] = value
25504
+
25505
+
25506
+ @property
25507
+ def type(self) -> str:
25508
+ """A type or category for the metric cube. This is used to categorize the metric cube & for filtering.
25509
+
25510
+ This may be used to group similar types of metrics for a given entity, e.g. 'row_count', 'error_count', etc.
25511
+ that originated in different places."""
25512
+ return self._inner_dict.get('type') # type: ignore
25513
+
25514
+ @type.setter
25515
+ def type(self, value: str) -> None:
25516
+ self._inner_dict['type'] = value
25517
+
25518
+
25519
+ @property
25520
+ def description(self) -> Union[None, str]:
25521
+ """ Optional description for the metric"""
25522
+ return self._inner_dict.get('description') # type: ignore
25523
+
25524
+ @description.setter
25525
+ def description(self, value: Union[None, str]) -> None:
25526
+ self._inner_dict['description'] = value
25527
+
25528
+
25529
+ @property
25530
+ def entity(self) -> Union[None, str]:
25531
+ """ An optional URN for the entity that this metric cube is associated with."""
25532
+ return self._inner_dict.get('entity') # type: ignore
25533
+
25534
+ @entity.setter
25535
+ def entity(self, value: Union[None, str]) -> None:
25536
+ self._inner_dict['entity'] = value
25537
+
25538
+
25539
+ @property
25540
+ def origin(self) -> "DataHubMetricCubeOriginClass":
25541
+ """The origin of the metric cube."""
25542
+ return self._inner_dict.get('origin') # type: ignore
25543
+
25544
+ @origin.setter
25545
+ def origin(self, value: "DataHubMetricCubeOriginClass") -> None:
25546
+ self._inner_dict['origin'] = value
25547
+
25548
+
25549
+ @property
25550
+ def measures(self) -> Union[None, "DataHubMetricCubeMeasuresClass"]:
25551
+ """ Optional - The measures of the cube for display purposes."""
25552
+ return self._inner_dict.get('measures') # type: ignore
25553
+
25554
+ @measures.setter
25555
+ def measures(self, value: Union[None, "DataHubMetricCubeMeasuresClass"]) -> None:
25556
+ self._inner_dict['measures'] = value
25557
+
25558
+
25559
+ @property
25560
+ def dimensions(self) -> Union[None, "DataHubMetricCubeDimensionsClass"]:
25561
+ """Optional - The dimensions of the cube for display purposes."""
25562
+ return self._inner_dict.get('dimensions') # type: ignore
25563
+
25564
+ @dimensions.setter
25565
+ def dimensions(self, value: Union[None, "DataHubMetricCubeDimensionsClass"]) -> None:
25566
+ self._inner_dict['dimensions'] = value
25567
+
25568
+
25569
+ class DataHubMetricCubeDimensionClass(DictWrapper):
25570
+ """The definition of a metric cube dimension."""
25571
+
25572
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimension")
25573
+ def __init__(self,
25574
+ name: str,
25575
+ ):
25576
+ super().__init__()
25577
+
25578
+ self.name = name
25579
+
25580
+ def _restore_defaults(self) -> None:
25581
+ self.name = str()
25582
+
25583
+
25584
+ @property
25585
+ def name(self) -> str:
25586
+ """ The name of the dimension"""
25587
+ return self._inner_dict.get('name') # type: ignore
25588
+
25589
+ @name.setter
25590
+ def name(self, value: str) -> None:
25591
+ self._inner_dict['name'] = value
25592
+
25593
+
25594
+ class DataHubMetricCubeDimensionsClass(DictWrapper):
25595
+ """The dimensions of the cube. This is what you filter and group by.
25596
+ This is a record to allow for future expansion of the dimensions."""
25597
+
25598
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimensions")
25599
+ def __init__(self,
25600
+ dim1: Union[None, "DataHubMetricCubeDimensionClass"]=None,
25601
+ dim2: Union[None, "DataHubMetricCubeDimensionClass"]=None,
25602
+ dim3: Union[None, "DataHubMetricCubeDimensionClass"]=None,
25603
+ ):
25604
+ super().__init__()
25605
+
25606
+ self.dim1 = dim1
25607
+ self.dim2 = dim2
25608
+ self.dim3 = dim3
25609
+
25610
+ def _restore_defaults(self) -> None:
25611
+ self.dim1 = self.RECORD_SCHEMA.fields_dict["dim1"].default
25612
+ self.dim2 = self.RECORD_SCHEMA.fields_dict["dim2"].default
25613
+ self.dim3 = self.RECORD_SCHEMA.fields_dict["dim3"].default
25614
+
25615
+
25616
+ @property
25617
+ def dim1(self) -> Union[None, "DataHubMetricCubeDimensionClass"]:
25618
+ """ The first measure being tracked in the cube."""
25619
+ return self._inner_dict.get('dim1') # type: ignore
25620
+
25621
+ @dim1.setter
25622
+ def dim1(self, value: Union[None, "DataHubMetricCubeDimensionClass"]) -> None:
25623
+ self._inner_dict['dim1'] = value
25624
+
25625
+
25626
+ @property
25627
+ def dim2(self) -> Union[None, "DataHubMetricCubeDimensionClass"]:
25628
+ """Optional: A second measure being tracked in the cube."""
25629
+ return self._inner_dict.get('dim2') # type: ignore
25630
+
25631
+ @dim2.setter
25632
+ def dim2(self, value: Union[None, "DataHubMetricCubeDimensionClass"]) -> None:
25633
+ self._inner_dict['dim2'] = value
25634
+
25635
+
25636
+ @property
25637
+ def dim3(self) -> Union[None, "DataHubMetricCubeDimensionClass"]:
25638
+ """Optional: A third measure being tracked in the cube."""
25639
+ return self._inner_dict.get('dim3') # type: ignore
25640
+
25641
+ @dim3.setter
25642
+ def dim3(self, value: Union[None, "DataHubMetricCubeDimensionClass"]) -> None:
25643
+ self._inner_dict['dim3'] = value
25644
+
25645
+
25646
+ class DataHubMetricCubeEventClass(_Aspect):
25647
+ """A timeseries measure event, e.g. a single observation."""
25648
+
25649
+
25650
+ ASPECT_NAME = 'dataHubMetricCubeEvent'
25651
+ ASPECT_TYPE = 'timeseries'
25652
+ ASPECT_INFO = {}
25653
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeEvent")
25654
+
25655
+ def __init__(self,
25656
+ reportedTimeMillis: int,
25657
+ measure: float,
25658
+ timestampMillis: int,
25659
+ dim1: Union[None, List[str]]=None,
25660
+ dim2: Union[None, List[str]]=None,
25661
+ dim3: Union[None, List[str]]=None,
25662
+ eventGranularity: Union[None, "TimeWindowSizeClass"]=None,
25663
+ partitionSpec: Optional[Union["PartitionSpecClass", None]]=None,
25664
+ messageId: Union[None, str]=None,
25665
+ ):
25666
+ super().__init__()
25667
+
25668
+ self.reportedTimeMillis = reportedTimeMillis
25669
+ self.measure = measure
25670
+ self.dim1 = dim1
25671
+ self.dim2 = dim2
25672
+ self.dim3 = dim3
25673
+ self.timestampMillis = timestampMillis
25674
+ self.eventGranularity = eventGranularity
25675
+ if partitionSpec is None:
25676
+ # default: {'partition': 'FULL_TABLE_SNAPSHOT', 'type': 'FULL_TABLE', 'timePartition': None}
25677
+ self.partitionSpec = _json_converter.from_json_object(self.RECORD_SCHEMA.fields_dict["partitionSpec"].default, writers_schema=self.RECORD_SCHEMA.fields_dict["partitionSpec"].type)
25678
+ else:
25679
+ self.partitionSpec = partitionSpec
25680
+ self.messageId = messageId
25681
+
25682
+ def _restore_defaults(self) -> None:
25683
+ self.reportedTimeMillis = int()
25684
+ self.measure = float()
25685
+ self.dim1 = self.RECORD_SCHEMA.fields_dict["dim1"].default
25686
+ self.dim2 = self.RECORD_SCHEMA.fields_dict["dim2"].default
25687
+ self.dim3 = self.RECORD_SCHEMA.fields_dict["dim3"].default
25688
+ self.timestampMillis = int()
25689
+ self.eventGranularity = self.RECORD_SCHEMA.fields_dict["eventGranularity"].default
25690
+ self.partitionSpec = _json_converter.from_json_object(self.RECORD_SCHEMA.fields_dict["partitionSpec"].default, writers_schema=self.RECORD_SCHEMA.fields_dict["partitionSpec"].type)
25691
+ self.messageId = self.RECORD_SCHEMA.fields_dict["messageId"].default
25692
+
25693
+
25694
+ @property
25695
+ def reportedTimeMillis(self) -> int:
25696
+ """The event or bucket reported time field as epoch at UTC in milli seconds.
25697
+ This must be provided in order to pass validation."""
25698
+ return self._inner_dict.get('reportedTimeMillis') # type: ignore
25699
+
25700
+ @reportedTimeMillis.setter
25701
+ def reportedTimeMillis(self, value: int) -> None:
25702
+ self._inner_dict['reportedTimeMillis'] = value
25703
+
25704
+
25705
+ @property
25706
+ def measure(self) -> float:
25707
+ """The first measure value - Typically this is the primary metric."""
25708
+ return self._inner_dict.get('measure') # type: ignore
25709
+
25710
+ @measure.setter
25711
+ def measure(self, value: float) -> None:
25712
+ self._inner_dict['measure'] = value
25713
+
25714
+
25715
+ @property
25716
+ def dim1(self) -> Union[None, List[str]]:
25717
+ """The first dimension value(s). Array type to support multi-dimensionality."""
25718
+ return self._inner_dict.get('dim1') # type: ignore
25719
+
25720
+ @dim1.setter
25721
+ def dim1(self, value: Union[None, List[str]]) -> None:
25722
+ self._inner_dict['dim1'] = value
25723
+
25724
+
25725
+ @property
25726
+ def dim2(self) -> Union[None, List[str]]:
25727
+ """The second dimension value(s). Array type to support multi-dimensionality."""
25728
+ return self._inner_dict.get('dim2') # type: ignore
25729
+
25730
+ @dim2.setter
25731
+ def dim2(self, value: Union[None, List[str]]) -> None:
25732
+ self._inner_dict['dim2'] = value
25733
+
25734
+
25735
+ @property
25736
+ def dim3(self) -> Union[None, List[str]]:
25737
+ """The third dimension value(s). Array type to support multi-dimensionality."""
25738
+ return self._inner_dict.get('dim3') # type: ignore
25739
+
25740
+ @dim3.setter
25741
+ def dim3(self, value: Union[None, List[str]]) -> None:
25742
+ self._inner_dict['dim3'] = value
25743
+
25744
+
25745
+ @property
25746
+ def timestampMillis(self) -> int:
25747
+ """The event timestamp field as epoch at UTC in milli seconds."""
25748
+ return self._inner_dict.get('timestampMillis') # type: ignore
25749
+
25750
+ @timestampMillis.setter
25751
+ def timestampMillis(self, value: int) -> None:
25752
+ self._inner_dict['timestampMillis'] = value
25753
+
25754
+
25755
+ @property
25756
+ def eventGranularity(self) -> Union[None, "TimeWindowSizeClass"]:
25757
+ """Granularity of the event if applicable"""
25758
+ return self._inner_dict.get('eventGranularity') # type: ignore
25759
+
25760
+ @eventGranularity.setter
25761
+ def eventGranularity(self, value: Union[None, "TimeWindowSizeClass"]) -> None:
25762
+ self._inner_dict['eventGranularity'] = value
25763
+
25764
+
25765
+ @property
25766
+ def partitionSpec(self) -> Union["PartitionSpecClass", None]:
25767
+ """The optional partition specification."""
25768
+ return self._inner_dict.get('partitionSpec') # type: ignore
25769
+
25770
+ @partitionSpec.setter
25771
+ def partitionSpec(self, value: Union["PartitionSpecClass", None]) -> None:
25772
+ self._inner_dict['partitionSpec'] = value
25773
+
25774
+
25775
+ @property
25776
+ def messageId(self) -> Union[None, str]:
25777
+ """The optional messageId, if provided serves as a custom user-defined unique identifier for an aspect value."""
25778
+ return self._inner_dict.get('messageId') # type: ignore
25779
+
25780
+ @messageId.setter
25781
+ def messageId(self, value: Union[None, str]) -> None:
25782
+ self._inner_dict['messageId'] = value
25783
+
25784
+
25785
+ class DataHubMetricCubeMeasureClass(DictWrapper):
25786
+ """The definition of a metric cube measure."""
25787
+
25788
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasure")
25789
+ def __init__(self,
25790
+ name: str,
25791
+ ):
25792
+ super().__init__()
25793
+
25794
+ self.name = name
25795
+
25796
+ def _restore_defaults(self) -> None:
25797
+ self.name = str()
25798
+
25799
+
25800
+ @property
25801
+ def name(self) -> str:
25802
+ """ The name of the measure"""
25803
+ return self._inner_dict.get('name') # type: ignore
25804
+
25805
+ @name.setter
25806
+ def name(self, value: str) -> None:
25807
+ self._inner_dict['name'] = value
25808
+
25809
+
25810
+ class DataHubMetricCubeMeasuresClass(DictWrapper):
25811
+ """The definition of the measures of a metric cube.
25812
+ A measure is a metric that is being tracked in the cube."""
25813
+
25814
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasures")
25815
+ def __init__(self,
25816
+ measure: "DataHubMetricCubeMeasureClass",
25817
+ ):
25818
+ super().__init__()
25819
+
25820
+ self.measure = measure
25821
+
25822
+ def _restore_defaults(self) -> None:
25823
+ self.measure = DataHubMetricCubeMeasureClass._construct_with_defaults()
25824
+
25825
+
25826
+ @property
25827
+ def measure(self) -> "DataHubMetricCubeMeasureClass":
25828
+ """ The first measure being tracked in the cube."""
25829
+ return self._inner_dict.get('measure') # type: ignore
25830
+
25831
+ @measure.setter
25832
+ def measure(self, value: "DataHubMetricCubeMeasureClass") -> None:
25833
+ self._inner_dict['measure'] = value
25834
+
25835
+
25836
+ class DataHubMetricCubeOriginClass(DictWrapper):
25837
+ """Information about the origin of the metric cube"""
25838
+
25839
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeOrigin")
25840
+ def __init__(self,
25841
+ type: Union[str, "DataHubMetricSourceTypeClass"],
25842
+ originUrn: Union[None, str]=None,
25843
+ ):
25844
+ super().__init__()
25845
+
25846
+ self.type = type
25847
+ self.originUrn = originUrn
25848
+
25849
+ def _restore_defaults(self) -> None:
25850
+ self.type = DataHubMetricSourceTypeClass.MANUAL
25851
+ self.originUrn = self.RECORD_SCHEMA.fields_dict["originUrn"].default
25852
+
25853
+
25854
+ @property
25855
+ def type(self) -> Union[str, "DataHubMetricSourceTypeClass"]:
25856
+ """Message associated with the incident"""
25857
+ return self._inner_dict.get('type') # type: ignore
25858
+
25859
+ @type.setter
25860
+ def type(self, value: Union[str, "DataHubMetricSourceTypeClass"]) -> None:
25861
+ self._inner_dict['type'] = value
25862
+
25863
+
25864
+ @property
25865
+ def originUrn(self) -> Union[None, str]:
25866
+ """Reference to the source that created the metric.
25867
+ In the case of assertion monitor, this is the URN of the assertion monitor."""
25868
+ return self._inner_dict.get('originUrn') # type: ignore
25869
+
25870
+ @originUrn.setter
25871
+ def originUrn(self, value: Union[None, str]) -> None:
25872
+ self._inner_dict['originUrn'] = value
25873
+
25874
+
25875
+ class DataHubMetricSourceTypeClass(object):
25876
+ # No docs available.
25877
+
25878
+ MANUAL = "MANUAL"
25879
+ """Manually created metric, via UI or API."""
25880
+
25881
+ ASSERTION_MONITOR = "ASSERTION_MONITOR"
25882
+ """Assertion monitor created the metric."""
25883
+
25884
+
25885
+
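Editor's note: taken together, the new metric-cube classes form a small definition-plus-timeseries model: DataHubMetricCubeDefinitionClass describes the cube (name, origin, optional measures and dimensions), while DataHubMetricCubeEventClass records individual observations against it. A hedged end-to-end sketch using only the constructors shown above; all literal values are illustrative:

import time

from acryl_datahub_cloud.metadata.schema_classes import (
    DataHubMetricCubeDefinitionClass,
    DataHubMetricCubeDimensionClass,
    DataHubMetricCubeDimensionsClass,
    DataHubMetricCubeEventClass,
    DataHubMetricCubeMeasureClass,
    DataHubMetricCubeMeasuresClass,
    DataHubMetricCubeOriginClass,
    DataHubMetricSourceTypeClass,
)

# Describe the cube: one measure ("row_count") broken down by one dimension ("table").
definition = DataHubMetricCubeDefinitionClass(
    name="Dataset row counts",                     # display name
    origin=DataHubMetricCubeOriginClass(
        type=DataHubMetricSourceTypeClass.MANUAL,  # created via UI/API rather than a monitor
    ),
    description="Daily row counts per table",
    measures=DataHubMetricCubeMeasuresClass(
        measure=DataHubMetricCubeMeasureClass(name="row_count"),
    ),
    dimensions=DataHubMetricCubeDimensionsClass(
        dim1=DataHubMetricCubeDimensionClass(name="table"),
    ),
)

# Record a single observation for that cube.
now_ms = int(time.time() * 1000)
event = DataHubMetricCubeEventClass(
    reportedTimeMillis=now_ms,
    timestampMillis=now_ms,
    measure=12345.0,
    dim1=["fct_orders"],  # dimension values are arrays to allow multi-valued dimensions
)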
24862
25886
  class BaseDataClass(DictWrapper):
24863
25887
  """BaseData record"""
24864
25888
 
@@ -27339,7 +28363,7 @@ class MonitorInfoClass(_Aspect):
27339
28363
 
27340
28364
  @property
27341
28365
  def executorId(self) -> Union[None, str]:
27342
- """Advanced: The executor ID of the remote monitor service, if any."""
28366
+ """Advanced: The executor pool id of the remote monitor service, if any."""
27343
28367
  return self._inner_dict.get('executorId') # type: ignore
27344
28368
 
27345
28369
  @executorId.setter
@@ -34992,7 +36016,7 @@ class VersionSetPropertiesClass(_Aspect):
34992
36016
  def _restore_defaults(self) -> None:
34993
36017
  self.customProperties = dict()
34994
36018
  self.latest = str()
34995
- self.versioningScheme = VersioningSchemeClass.ALPHANUMERIC_GENERATED_BY_DATAHUB
36019
+ self.versioningScheme = VersioningSchemeClass.LEXICOGRAPHIC_STRING
34996
36020
 
34997
36021
 
34998
36022
  @property
@@ -35028,7 +36052,12 @@ class VersionSetPropertiesClass(_Aspect):
35028
36052
  class VersioningSchemeClass(object):
35029
36053
  # No docs available.
35030
36054
 
36055
+ LEXICOGRAPHIC_STRING = "LEXICOGRAPHIC_STRING"
36056
+ """String sorted lexicographically."""
36057
+
35031
36058
  ALPHANUMERIC_GENERATED_BY_DATAHUB = "ALPHANUMERIC_GENERATED_BY_DATAHUB"
36059
+ """String managed by DataHub. Currently, an 8 character alphabetical string."""
36060
+
35032
36061
 
35033
36062
 
35034
36063
  class DataHubViewDefinitionClass(DictWrapper):
@@ -35195,7 +36224,10 @@ __SCHEMA_TYPES = {
35195
36224
  'com.linkedin.pegasus2avro.actionrequest.DataContractProposal': DataContractProposalClass,
35196
36225
  'com.linkedin.pegasus2avro.actionrequest.DataContractProposalOperationType': DataContractProposalOperationTypeClass,
35197
36226
  'com.linkedin.pegasus2avro.actionrequest.DescriptionProposal': DescriptionProposalClass,
36227
+ 'com.linkedin.pegasus2avro.actionrequest.DomainProposal': DomainProposalClass,
35198
36228
  'com.linkedin.pegasus2avro.actionrequest.GlossaryTermProposal': GlossaryTermProposalClass,
36229
+ 'com.linkedin.pegasus2avro.actionrequest.OwnerProposal': OwnerProposalClass,
36230
+ 'com.linkedin.pegasus2avro.actionrequest.StructuredPropertyProposal': StructuredPropertyProposalClass,
35199
36231
  'com.linkedin.pegasus2avro.actionrequest.TagProposal': TagProposalClass,
35200
36232
  'com.linkedin.pegasus2avro.ai.AiInferenceMetadata': AiInferenceMetadataClass,
35201
36233
  'com.linkedin.pegasus2avro.ai.EntityInferenceMetadata': EntityInferenceMetadataClass,
@@ -35407,6 +36439,7 @@ __SCHEMA_TYPES = {
35407
36439
  'com.linkedin.pegasus2avro.dataplatform.PlatformType': PlatformTypeClass,
35408
36440
  'com.linkedin.pegasus2avro.dataplatform.slack.SlackUserInfo': SlackUserInfoClass,
35409
36441
  'com.linkedin.pegasus2avro.dataplatforminstance.DataPlatformInstanceProperties': DataPlatformInstancePropertiesClass,
36442
+ 'com.linkedin.pegasus2avro.dataplatforminstance.IcebergWarehouseInfo': IcebergWarehouseInfoClass,
35410
36443
  'com.linkedin.pegasus2avro.dataprocess.DataProcessInfo': DataProcessInfoClass,
35411
36444
  'com.linkedin.pegasus2avro.dataprocess.DataProcessInstanceInput': DataProcessInstanceInputClass,
35412
36445
  'com.linkedin.pegasus2avro.dataprocess.DataProcessInstanceOutput': DataProcessInstanceOutputClass,
@@ -35439,6 +36472,7 @@ __SCHEMA_TYPES = {
35439
36472
  'com.linkedin.pegasus2avro.dataset.FineGrainedLineageDownstreamType': FineGrainedLineageDownstreamTypeClass,
35440
36473
  'com.linkedin.pegasus2avro.dataset.FineGrainedLineageUpstreamType': FineGrainedLineageUpstreamTypeClass,
35441
36474
  'com.linkedin.pegasus2avro.dataset.Histogram': HistogramClass,
36475
+ 'com.linkedin.pegasus2avro.dataset.IcebergCatalogInfo': IcebergCatalogInfoClass,
35442
36476
  'com.linkedin.pegasus2avro.dataset.PartitionSummary': PartitionSummaryClass,
35443
36477
  'com.linkedin.pegasus2avro.dataset.PartitionsSummary': PartitionsSummaryClass,
35444
36478
  'com.linkedin.pegasus2avro.dataset.Quantile': QuantileClass,
@@ -35477,7 +36511,10 @@ __SCHEMA_TYPES = {
35477
36511
  'com.linkedin.pegasus2avro.execution.ExecutionRequestSource': ExecutionRequestSourceClass,
35478
36512
  'com.linkedin.pegasus2avro.execution.StructuredExecutionReport': StructuredExecutionReportClass,
35479
36513
  'com.linkedin.pegasus2avro.executor.RemoteExecutorStatus': RemoteExecutorStatusClass,
36514
+ 'com.linkedin.pegasus2avro.executorglobalconfig.RemoteExecutorPoolGlobalConfig': RemoteExecutorPoolGlobalConfigClass,
35480
36515
  'com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolInfo': RemoteExecutorPoolInfoClass,
36516
+ 'com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolState': RemoteExecutorPoolStateClass,
36517
+ 'com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolStatus': RemoteExecutorPoolStatusClass,
35481
36518
  'com.linkedin.pegasus2avro.form.DomainParams': DomainParamsClass,
35482
36519
  'com.linkedin.pegasus2avro.form.DynamicFormAssignment': DynamicFormAssignmentClass,
35483
36520
  'com.linkedin.pegasus2avro.form.FormActorAssignment': FormActorAssignmentClass,
@@ -35557,6 +36594,7 @@ __SCHEMA_TYPES = {
35557
36594
  'com.linkedin.pegasus2avro.metadata.key.DataHubActionKey': DataHubActionKeyClass,
35558
36595
  'com.linkedin.pegasus2avro.metadata.key.DataHubConnectionKey': DataHubConnectionKeyClass,
35559
36596
  'com.linkedin.pegasus2avro.metadata.key.DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
36597
+ 'com.linkedin.pegasus2avro.metadata.key.DataHubMetricCubeKey': DataHubMetricCubeKeyClass,
35560
36598
  'com.linkedin.pegasus2avro.metadata.key.DataHubPersonaKey': DataHubPersonaKeyClass,
35561
36599
  'com.linkedin.pegasus2avro.metadata.key.DataHubPolicyKey': DataHubPolicyKeyClass,
35562
36600
  'com.linkedin.pegasus2avro.metadata.key.DataHubRetentionKey': DataHubRetentionKeyClass,
@@ -35594,6 +36632,7 @@ __SCHEMA_TYPES = {
35594
36632
  'com.linkedin.pegasus2avro.metadata.key.PostKey': PostKeyClass,
35595
36633
  'com.linkedin.pegasus2avro.metadata.key.QueryKey': QueryKeyClass,
35596
36634
  'com.linkedin.pegasus2avro.metadata.key.RecommendationModuleKey': RecommendationModuleKeyClass,
36635
+ 'com.linkedin.pegasus2avro.metadata.key.RemoteExecutorGlobalConfigKey': RemoteExecutorGlobalConfigKeyClass,
35597
36636
  'com.linkedin.pegasus2avro.metadata.key.RemoteExecutorKey': RemoteExecutorKeyClass,
35598
36637
  'com.linkedin.pegasus2avro.metadata.key.RemoteExecutorPoolKey': RemoteExecutorPoolKeyClass,
35599
36638
  'com.linkedin.pegasus2avro.metadata.key.RoleKey': RoleKeyClass,
@@ -35641,6 +36680,14 @@ __SCHEMA_TYPES = {
35641
36680
  'com.linkedin.pegasus2avro.metadata.snapshot.MLPrimaryKeySnapshot': MLPrimaryKeySnapshotClass,
35642
36681
  'com.linkedin.pegasus2avro.metadata.snapshot.SchemaFieldSnapshot': SchemaFieldSnapshotClass,
35643
36682
  'com.linkedin.pegasus2avro.metadata.snapshot.TagSnapshot': TagSnapshotClass,
36683
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeDefinition': DataHubMetricCubeDefinitionClass,
36684
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimension': DataHubMetricCubeDimensionClass,
36685
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimensions': DataHubMetricCubeDimensionsClass,
36686
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeEvent': DataHubMetricCubeEventClass,
36687
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasure': DataHubMetricCubeMeasureClass,
36688
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasures': DataHubMetricCubeMeasuresClass,
36689
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeOrigin': DataHubMetricCubeOriginClass,
36690
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricSourceType': DataHubMetricSourceTypeClass,
35644
36691
  'com.linkedin.pegasus2avro.ml.metadata.BaseData': BaseDataClass,
35645
36692
  'com.linkedin.pegasus2avro.ml.metadata.CaveatDetails': CaveatDetailsClass,
35646
36693
  'com.linkedin.pegasus2avro.ml.metadata.CaveatsAndRecommendations': CaveatsAndRecommendationsClass,
@@ -35866,7 +36913,10 @@ __SCHEMA_TYPES = {
35866
36913
  'DataContractProposal': DataContractProposalClass,
35867
36914
  'DataContractProposalOperationType': DataContractProposalOperationTypeClass,
35868
36915
  'DescriptionProposal': DescriptionProposalClass,
36916
+ 'DomainProposal': DomainProposalClass,
35869
36917
  'GlossaryTermProposal': GlossaryTermProposalClass,
36918
+ 'OwnerProposal': OwnerProposalClass,
36919
+ 'StructuredPropertyProposal': StructuredPropertyProposalClass,
35870
36920
  'TagProposal': TagProposalClass,
35871
36921
  'AiInferenceMetadata': AiInferenceMetadataClass,
35872
36922
  'EntityInferenceMetadata': EntityInferenceMetadataClass,
@@ -36078,6 +37128,7 @@ __SCHEMA_TYPES = {
36078
37128
  'PlatformType': PlatformTypeClass,
36079
37129
  'SlackUserInfo': SlackUserInfoClass,
36080
37130
  'DataPlatformInstanceProperties': DataPlatformInstancePropertiesClass,
37131
+ 'IcebergWarehouseInfo': IcebergWarehouseInfoClass,
36081
37132
  'DataProcessInfo': DataProcessInfoClass,
36082
37133
  'DataProcessInstanceInput': DataProcessInstanceInputClass,
36083
37134
  'DataProcessInstanceOutput': DataProcessInstanceOutputClass,
@@ -36110,6 +37161,7 @@ __SCHEMA_TYPES = {
36110
37161
  'FineGrainedLineageDownstreamType': FineGrainedLineageDownstreamTypeClass,
36111
37162
  'FineGrainedLineageUpstreamType': FineGrainedLineageUpstreamTypeClass,
36112
37163
  'Histogram': HistogramClass,
37164
+ 'IcebergCatalogInfo': IcebergCatalogInfoClass,
36113
37165
  'PartitionSummary': PartitionSummaryClass,
36114
37166
  'PartitionsSummary': PartitionsSummaryClass,
36115
37167
  'Quantile': QuantileClass,
@@ -36148,7 +37200,10 @@ __SCHEMA_TYPES = {
36148
37200
  'ExecutionRequestSource': ExecutionRequestSourceClass,
36149
37201
  'StructuredExecutionReport': StructuredExecutionReportClass,
36150
37202
  'RemoteExecutorStatus': RemoteExecutorStatusClass,
37203
+ 'RemoteExecutorPoolGlobalConfig': RemoteExecutorPoolGlobalConfigClass,
36151
37204
  'RemoteExecutorPoolInfo': RemoteExecutorPoolInfoClass,
37205
+ 'RemoteExecutorPoolState': RemoteExecutorPoolStateClass,
37206
+ 'RemoteExecutorPoolStatus': RemoteExecutorPoolStatusClass,
36152
37207
  'DomainParams': DomainParamsClass,
36153
37208
  'DynamicFormAssignment': DynamicFormAssignmentClass,
36154
37209
  'FormActorAssignment': FormActorAssignmentClass,
@@ -36228,6 +37283,7 @@ __SCHEMA_TYPES = {
36228
37283
  'DataHubActionKey': DataHubActionKeyClass,
36229
37284
  'DataHubConnectionKey': DataHubConnectionKeyClass,
36230
37285
  'DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
37286
+ 'DataHubMetricCubeKey': DataHubMetricCubeKeyClass,
36231
37287
  'DataHubPersonaKey': DataHubPersonaKeyClass,
36232
37288
  'DataHubPolicyKey': DataHubPolicyKeyClass,
36233
37289
  'DataHubRetentionKey': DataHubRetentionKeyClass,
@@ -36265,6 +37321,7 @@ __SCHEMA_TYPES = {
36265
37321
  'PostKey': PostKeyClass,
36266
37322
  'QueryKey': QueryKeyClass,
36267
37323
  'RecommendationModuleKey': RecommendationModuleKeyClass,
37324
+ 'RemoteExecutorGlobalConfigKey': RemoteExecutorGlobalConfigKeyClass,
36268
37325
  'RemoteExecutorKey': RemoteExecutorKeyClass,
36269
37326
  'RemoteExecutorPoolKey': RemoteExecutorPoolKeyClass,
36270
37327
  'RoleKey': RoleKeyClass,
@@ -36312,6 +37369,14 @@ __SCHEMA_TYPES = {
36312
37369
  'MLPrimaryKeySnapshot': MLPrimaryKeySnapshotClass,
36313
37370
  'SchemaFieldSnapshot': SchemaFieldSnapshotClass,
36314
37371
  'TagSnapshot': TagSnapshotClass,
37372
+ 'DataHubMetricCubeDefinition': DataHubMetricCubeDefinitionClass,
37373
+ 'DataHubMetricCubeDimension': DataHubMetricCubeDimensionClass,
37374
+ 'DataHubMetricCubeDimensions': DataHubMetricCubeDimensionsClass,
37375
+ 'DataHubMetricCubeEvent': DataHubMetricCubeEventClass,
37376
+ 'DataHubMetricCubeMeasure': DataHubMetricCubeMeasureClass,
37377
+ 'DataHubMetricCubeMeasures': DataHubMetricCubeMeasuresClass,
37378
+ 'DataHubMetricCubeOrigin': DataHubMetricCubeOriginClass,
37379
+ 'DataHubMetricSourceType': DataHubMetricSourceTypeClass,
36315
37380
  'BaseData': BaseDataClass,
36316
37381
  'CaveatDetails': CaveatDetailsClass,
36317
37382
  'CaveatsAndRecommendations': CaveatsAndRecommendationsClass,
@@ -36566,6 +37631,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36566
37631
  EditableContainerPropertiesClass,
36567
37632
  ContainerPropertiesClass,
36568
37633
  ContainerClass,
37634
+ RemoteExecutorPoolGlobalConfigClass,
36569
37635
  DataHubRetentionConfigClass,
36570
37636
  TelemetryClientIdClass,
36571
37637
  DataHubAccessTokenInfoClass,
@@ -36609,6 +37675,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36609
37675
  TestInfoClass,
36610
37676
  BatchTestRunEventClass,
36611
37677
  DataPlatformInstancePropertiesClass,
37678
+ IcebergWarehouseInfoClass,
36612
37679
  EditableERModelRelationshipPropertiesClass,
36613
37680
  ERModelRelationshipPropertiesClass,
36614
37681
  EntityTypeInfoClass,
@@ -36618,6 +37685,8 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36618
37685
  EditableSchemaMetadataClass,
36619
37686
  SchemaProposalsClass,
36620
37687
  SchemaMetadataClass,
37688
+ DataHubMetricCubeEventClass,
37689
+ DataHubMetricCubeDefinitionClass,
36621
37690
  AccessClass,
36622
37691
  AnomaliesSummaryClass,
36623
37692
  ProposalsClass,
@@ -36666,6 +37735,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36666
37735
  EditableDatasetPropertiesClass,
36667
37736
  DatasetProfileClass,
36668
37737
  DatasetDeprecationClass,
37738
+ IcebergCatalogInfoClass,
36669
37739
  DatasetPropertiesClass,
36670
37740
  PartitionsSummaryClass,
36671
37741
  DatasetUpstreamLineageClass,
@@ -36710,6 +37780,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36710
37780
  PostKeyClass,
36711
37781
  MLPrimaryKeyKeyClass,
36712
37782
  DataPlatformInstanceKeyClass,
37783
+ DataHubMetricCubeKeyClass,
36713
37784
  QueryKeyClass,
36714
37785
  DatasetKeyClass,
36715
37786
  ExecutionRequestKeyClass,
@@ -36744,6 +37815,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36744
37815
  DataHubRetentionKeyClass,
36745
37816
  OwnershipTypeKeyClass,
36746
37817
  ActionRequestKeyClass,
37818
+ RemoteExecutorGlobalConfigKeyClass,
36747
37819
  DataFlowKeyClass,
36748
37820
  GenericEntityKeyClass,
36749
37821
  DataContractKeyClass,
@@ -36792,6 +37864,7 @@ ASPECT_NAME_MAP: Dict[str, Type[_Aspect]] = {
36792
37864
  for aspect in ASPECT_CLASSES
36793
37865
  }
36794
37866
 
37867
+ from typing import Literal
36795
37868
  from typing_extensions import TypedDict
36796
37869
 
36797
37870
  class AspectBag(TypedDict, total=False):
@@ -36835,6 +37908,7 @@ class AspectBag(TypedDict, total=False):
36835
37908
  editableContainerProperties: EditableContainerPropertiesClass
36836
37909
  containerProperties: ContainerPropertiesClass
36837
37910
  container: ContainerClass
37911
+ dataHubRemoteExecutorPoolGlobalConfig: RemoteExecutorPoolGlobalConfigClass
36838
37912
  dataHubRetentionConfig: DataHubRetentionConfigClass
36839
37913
  telemetryClientId: TelemetryClientIdClass
36840
37914
  dataHubAccessTokenInfo: DataHubAccessTokenInfoClass
@@ -36878,6 +37952,7 @@ class AspectBag(TypedDict, total=False):
36878
37952
  testInfo: TestInfoClass
36879
37953
  batchTestRunEvent: BatchTestRunEventClass
36880
37954
  dataPlatformInstanceProperties: DataPlatformInstancePropertiesClass
37955
+ icebergWarehouseInfo: IcebergWarehouseInfoClass
36881
37956
  editableERModelRelationshipProperties: EditableERModelRelationshipPropertiesClass
36882
37957
  erModelRelationshipProperties: ERModelRelationshipPropertiesClass
36883
37958
  entityTypeInfo: EntityTypeInfoClass
@@ -36887,6 +37962,8 @@ class AspectBag(TypedDict, total=False):
36887
37962
  editableSchemaMetadata: EditableSchemaMetadataClass
36888
37963
  schemaProposals: SchemaProposalsClass
36889
37964
  schemaMetadata: SchemaMetadataClass
37965
+ dataHubMetricCubeEvent: DataHubMetricCubeEventClass
37966
+ dataHubMetricCubeDefinition: DataHubMetricCubeDefinitionClass
36890
37967
  access: AccessClass
36891
37968
  anomaliesSummary: AnomaliesSummaryClass
36892
37969
  proposals: ProposalsClass
@@ -36935,6 +38012,7 @@ class AspectBag(TypedDict, total=False):
36935
38012
  editableDatasetProperties: EditableDatasetPropertiesClass
36936
38013
  datasetProfile: DatasetProfileClass
36937
38014
  datasetDeprecation: DatasetDeprecationClass
38015
+ icebergCatalogInfo: IcebergCatalogInfoClass
36938
38016
  datasetProperties: DatasetPropertiesClass
36939
38017
  partitionsSummary: PartitionsSummaryClass
36940
38018
  datasetUpstreamLineage: DatasetUpstreamLineageClass
@@ -36979,6 +38057,7 @@ class AspectBag(TypedDict, total=False):
36979
38057
  postKey: PostKeyClass
36980
38058
  mlPrimaryKeyKey: MLPrimaryKeyKeyClass
36981
38059
  dataPlatformInstanceKey: DataPlatformInstanceKeyClass
38060
+ dataHubMetricCubeKey: DataHubMetricCubeKeyClass
36982
38061
  queryKey: QueryKeyClass
36983
38062
  datasetKey: DatasetKeyClass
36984
38063
  dataHubExecutionRequestKey: ExecutionRequestKeyClass
@@ -37013,6 +38092,7 @@ class AspectBag(TypedDict, total=False):
37013
38092
  dataHubRetentionKey: DataHubRetentionKeyClass
37014
38093
  ownershipTypeKey: OwnershipTypeKeyClass
37015
38094
  actionRequestKey: ActionRequestKeyClass
38095
+ dataHubRemoteExecutorGlobalConfigKey: RemoteExecutorGlobalConfigKeyClass
37016
38096
  dataFlowKey: DataFlowKeyClass
37017
38097
  genericEntityKey: GenericEntityKeyClass
37018
38098
  dataContractKey: DataContractKeyClass
@@ -37077,6 +38157,7 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
37077
38157
  'post': PostKeyClass,
37078
38158
  'mlPrimaryKey': MLPrimaryKeyKeyClass,
37079
38159
  'dataPlatformInstance': DataPlatformInstanceKeyClass,
38160
+ 'dataHubMetricCube': DataHubMetricCubeKeyClass,
37080
38161
  'query': QueryKeyClass,
37081
38162
  'dataset': DatasetKeyClass,
37082
38163
  'dataHubExecutionRequest': ExecutionRequestKeyClass,
@@ -37111,6 +38192,7 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
37111
38192
  'dataHubRetention': DataHubRetentionKeyClass,
37112
38193
  'ownershipType': OwnershipTypeKeyClass,
37113
38194
  'actionRequest': ActionRequestKeyClass,
38195
+ 'dataHubRemoteExecutorGlobalConfig': RemoteExecutorGlobalConfigKeyClass,
37114
38196
  'dataFlow': DataFlowKeyClass,
37115
38197
  'dataContract': DataContractKeyClass,
37116
38198
  'dataHubConnection': DataHubConnectionKeyClass,
@@ -37125,4 +38207,145 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
37125
38207
  'telemetry': TelemetryKeyClass
37126
38208
  }
37127
38209
 
38210
+ ENTITY_TYPE_NAMES: List[str] = [
38211
+ 'dataProduct',
38212
+ 'structuredProperty',
38213
+ 'entityType',
38214
+ 'platformResource',
38215
+ 'businessAttribute',
38216
+ 'dataType',
38217
+ 'inviteToken',
38218
+ 'assertion',
38219
+ 'schemaField',
38220
+ 'corpGroup',
38221
+ 'role',
38222
+ 'dataProcessInstance',
38223
+ 'dataHubSecret',
38224
+ 'dataHubPersona',
38225
+ 'mlFeatureTable',
38226
+ 'glossaryTerm',
38227
+ 'recommendationModule',
38228
+ 'post',
38229
+ 'mlPrimaryKey',
38230
+ 'dataPlatformInstance',
38231
+ 'dataHubMetricCube',
38232
+ 'query',
38233
+ 'dataset',
38234
+ 'dataHubExecutionRequest',
38235
+ 'chart',
38236
+ 'mlModelGroup',
38237
+ 'subscription',
38238
+ 'anomaly',
38239
+ 'dataHubRemoteExecutorPool',
38240
+ 'dataJob',
38241
+ 'domain',
38242
+ 'erModelRelationship',
38243
+ 'dataHubRole',
38244
+ 'form',
38245
+ 'dataHubAccessToken',
38246
+ 'dataHubAction',
38247
+ 'dataPlatform',
38248
+ 'container',
38249
+ 'tag',
38250
+ 'dataHubPolicy',
38251
+ 'constraint',
38252
+ 'glossaryNode',
38253
+ 'dataProcess',
38254
+ 'dataHubStepState',
38255
+ 'notebook',
38256
+ 'mlFeature',
38257
+ 'dataHubUpgrade',
38258
+ 'dashboard',
38259
+ 'mlModel',
38260
+ 'incident',
38261
+ 'globalSettings',
38262
+ 'dataHubView',
38263
+ 'dataHubRetention',
38264
+ 'ownershipType',
38265
+ 'actionRequest',
38266
+ 'dataHubRemoteExecutorGlobalConfig',
38267
+ 'dataFlow',
38268
+ 'dataContract',
38269
+ 'dataHubConnection',
38270
+ 'dataHubRemoteExecutor',
38271
+ 'monitor',
38272
+ 'corpuser',
38273
+ 'versionSet',
38274
+ 'test',
38275
+ 'dataHubIngestionSource',
38276
+ 'mlModelDeployment',
38277
+ 'linkPreview',
38278
+ 'telemetry'
38279
+ ]
38280
+ EntityTypeName = Literal[
38281
+ 'dataProduct',
38282
+ 'structuredProperty',
38283
+ 'entityType',
38284
+ 'platformResource',
38285
+ 'businessAttribute',
38286
+ 'dataType',
38287
+ 'inviteToken',
38288
+ 'assertion',
38289
+ 'schemaField',
38290
+ 'corpGroup',
38291
+ 'role',
38292
+ 'dataProcessInstance',
38293
+ 'dataHubSecret',
38294
+ 'dataHubPersona',
38295
+ 'mlFeatureTable',
38296
+ 'glossaryTerm',
38297
+ 'recommendationModule',
38298
+ 'post',
38299
+ 'mlPrimaryKey',
38300
+ 'dataPlatformInstance',
38301
+ 'dataHubMetricCube',
38302
+ 'query',
38303
+ 'dataset',
38304
+ 'dataHubExecutionRequest',
38305
+ 'chart',
38306
+ 'mlModelGroup',
38307
+ 'subscription',
38308
+ 'anomaly',
38309
+ 'dataHubRemoteExecutorPool',
38310
+ 'dataJob',
38311
+ 'domain',
38312
+ 'erModelRelationship',
38313
+ 'dataHubRole',
38314
+ 'form',
38315
+ 'dataHubAccessToken',
38316
+ 'dataHubAction',
38317
+ 'dataPlatform',
38318
+ 'container',
38319
+ 'tag',
38320
+ 'dataHubPolicy',
38321
+ 'constraint',
38322
+ 'glossaryNode',
38323
+ 'dataProcess',
38324
+ 'dataHubStepState',
38325
+ 'notebook',
38326
+ 'mlFeature',
38327
+ 'dataHubUpgrade',
38328
+ 'dashboard',
38329
+ 'mlModel',
38330
+ 'incident',
38331
+ 'globalSettings',
38332
+ 'dataHubView',
38333
+ 'dataHubRetention',
38334
+ 'ownershipType',
38335
+ 'actionRequest',
38336
+ 'dataHubRemoteExecutorGlobalConfig',
38337
+ 'dataFlow',
38338
+ 'dataContract',
38339
+ 'dataHubConnection',
38340
+ 'dataHubRemoteExecutor',
38341
+ 'monitor',
38342
+ 'corpuser',
38343
+ 'versionSet',
38344
+ 'test',
38345
+ 'dataHubIngestionSource',
38346
+ 'mlModelDeployment',
38347
+ 'linkPreview',
38348
+ 'telemetry'
38349
+ ]
38350
+
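Editor's note: the new ENTITY_TYPE_NAMES list and EntityTypeName Literal expose the registry's entity names for static type checking, and they pair naturally with the existing KEY_ASPECTS map shown above. A small sketch, assuming imports from the generated schema_classes module:

from acryl_datahub_cloud.metadata.schema_classes import (
    ENTITY_TYPE_NAMES,
    KEY_ASPECTS,
    EntityTypeName,
)

def key_aspect_for(entity_type: EntityTypeName) -> type:
    # The Literal annotation lets a type checker reject strings that are not registered entity names.
    return KEY_ASPECTS[entity_type]

assert "dataHubMetricCube" in ENTITY_TYPE_NAMES
print(key_aspect_for("dataHubMetricCube").ASPECT_NAME)  # dataHubMetricCubeKey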
37128
38351
  # fmt: on