acryl-datahub-cloud 0.3.8.2rc4__py3-none-any.whl → 0.3.9rc1__py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of acryl-datahub-cloud might be problematic.

Files changed (78)
  1. acryl_datahub_cloud/_codegen_config.json +1 -1
  2. acryl_datahub_cloud/acryl_cs_issues/acryl_customer.py +1 -1
  3. acryl_datahub_cloud/action_request/__init__.py +0 -0
  4. acryl_datahub_cloud/action_request/action_request_owner_source.py +174 -0
  5. acryl_datahub_cloud/api/__init__.py +1 -1
  6. acryl_datahub_cloud/api/client.py +2 -2
  7. acryl_datahub_cloud/datahub_reporting/datahub_dataset.py +6 -6
  8. acryl_datahub_cloud/datahub_reporting/datahub_form_reporting.py +67 -33
  9. acryl_datahub_cloud/datahub_reporting/extract_sql.py +4 -4
  10. acryl_datahub_cloud/datahub_usage_reporting/query_builder.py +1 -0
  11. acryl_datahub_cloud/datahub_usage_reporting/usage_feature_patch_builder.py +21 -21
  12. acryl_datahub_cloud/datahub_usage_reporting/usage_feature_reporter.py +151 -141
  13. acryl_datahub_cloud/metadata/_urns/urn_defs.py +1064 -418
  14. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/actionrequest/__init__.py +6 -0
  15. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/dataplatforminstance/__init__.py +2 -0
  16. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/dataset/__init__.py +2 -0
  17. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/executorglobalconfig/__init__.py +15 -0
  18. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/executorpool/__init__.py +4 -0
  19. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py +4 -0
  20. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metric/__init__.py +29 -0
  21. acryl_datahub_cloud/metadata/schema.avsc +778 -42
  22. acryl_datahub_cloud/metadata/schema_classes.py +1089 -61
  23. acryl_datahub_cloud/metadata/schemas/ActionRequestInfo.avsc +422 -12
  24. acryl_datahub_cloud/metadata/schemas/ActionRequestStatus.avsc +12 -0
  25. acryl_datahub_cloud/metadata/schemas/AssertionAnalyticsRunEvent.avsc +5 -3
  26. acryl_datahub_cloud/metadata/schemas/AssertionInfo.avsc +5 -3
  27. acryl_datahub_cloud/metadata/schemas/AssertionRunEvent.avsc +5 -3
  28. acryl_datahub_cloud/metadata/schemas/BusinessAttributeInfo.avsc +6 -2
  29. acryl_datahub_cloud/metadata/schemas/BusinessAttributes.avsc +6 -0
  30. acryl_datahub_cloud/metadata/schemas/ChartInfo.avsc +1 -0
  31. acryl_datahub_cloud/metadata/schemas/ChartKey.avsc +3 -3
  32. acryl_datahub_cloud/metadata/schemas/ContainerKey.avsc +1 -1
  33. acryl_datahub_cloud/metadata/schemas/CorpGroupKey.avsc +1 -1
  34. acryl_datahub_cloud/metadata/schemas/DashboardKey.avsc +3 -3
  35. acryl_datahub_cloud/metadata/schemas/DataFlowKey.avsc +1 -1
  36. acryl_datahub_cloud/metadata/schemas/DataHubActionInfo.avsc +1 -1
  37. acryl_datahub_cloud/metadata/schemas/DataHubConnectionKey.avsc +2 -1
  38. acryl_datahub_cloud/metadata/schemas/DataHubIngestionSourceInfo.avsc +9 -4
  39. acryl_datahub_cloud/metadata/schemas/DataHubMetricCubeDefinition.avsc +185 -0
  40. acryl_datahub_cloud/metadata/schemas/DataHubMetricCubeEvent.avsc +184 -0
  41. acryl_datahub_cloud/metadata/schemas/DataHubMetricCubeKey.avsc +22 -0
  42. acryl_datahub_cloud/metadata/schemas/DataJobKey.avsc +4 -4
  43. acryl_datahub_cloud/metadata/schemas/DataPlatformInstanceKey.avsc +2 -1
  44. acryl_datahub_cloud/metadata/schemas/DataProcessInstanceInput.avsc +4 -2
  45. acryl_datahub_cloud/metadata/schemas/DataProcessInstanceOutput.avsc +2 -0
  46. acryl_datahub_cloud/metadata/schemas/DatasetKey.avsc +14 -13
  47. acryl_datahub_cloud/metadata/schemas/EditableSchemaMetadata.avsc +6 -2
  48. acryl_datahub_cloud/metadata/schemas/ExecutionRequestInput.avsc +6 -1
  49. acryl_datahub_cloud/metadata/schemas/ExecutionRequestSignal.avsc +1 -1
  50. acryl_datahub_cloud/metadata/schemas/GlossaryTerms.avsc +3 -1
  51. acryl_datahub_cloud/metadata/schemas/IcebergCatalogInfo.avsc +28 -0
  52. acryl_datahub_cloud/metadata/schemas/IcebergWarehouseInfo.avsc +96 -0
  53. acryl_datahub_cloud/metadata/schemas/IncidentActivityEvent.avsc +4 -1
  54. acryl_datahub_cloud/metadata/schemas/IncidentInfo.avsc +4 -1
  55. acryl_datahub_cloud/metadata/schemas/InputFields.avsc +3 -1
  56. acryl_datahub_cloud/metadata/schemas/MLFeatureKey.avsc +1 -1
  57. acryl_datahub_cloud/metadata/schemas/MLFeatureTableKey.avsc +1 -1
  58. acryl_datahub_cloud/metadata/schemas/MLModelGroupKey.avsc +1 -1
  59. acryl_datahub_cloud/metadata/schemas/MLModelKey.avsc +3 -3
  60. acryl_datahub_cloud/metadata/schemas/MLPrimaryKeyKey.avsc +1 -1
  61. acryl_datahub_cloud/metadata/schemas/MetadataChangeEvent.avsc +399 -176
  62. acryl_datahub_cloud/metadata/schemas/MonitorInfo.avsc +6 -4
  63. acryl_datahub_cloud/metadata/schemas/NotebookKey.avsc +1 -1
  64. acryl_datahub_cloud/metadata/schemas/Operation.avsc +4 -2
  65. acryl_datahub_cloud/metadata/schemas/RemoteExecutorGlobalConfigKey.avsc +21 -0
  66. acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolGlobalConfig.avsc +16 -0
  67. acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolInfo.avsc +85 -0
  68. acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolKey.avsc +1 -1
  69. acryl_datahub_cloud/metadata/schemas/RemoteExecutorStatus.avsc +5 -5
  70. acryl_datahub_cloud/metadata/schemas/SchemaFieldKey.avsc +2 -2
  71. acryl_datahub_cloud/metadata/schemas/SchemaMetadata.avsc +3 -1
  72. acryl_datahub_cloud/metadata/schemas/StructuredPropertyDefinition.avsc +14 -0
  73. {acryl_datahub_cloud-0.3.8.2rc4.dist-info → acryl_datahub_cloud-0.3.9rc1.dist-info}/METADATA +43 -43
  74. {acryl_datahub_cloud-0.3.8.2rc4.dist-info → acryl_datahub_cloud-0.3.9rc1.dist-info}/RECORD +77 -67
  75. {acryl_datahub_cloud-0.3.8.2rc4.dist-info → acryl_datahub_cloud-0.3.9rc1.dist-info}/WHEEL +1 -1
  76. {acryl_datahub_cloud-0.3.8.2rc4.dist-info → acryl_datahub_cloud-0.3.9rc1.dist-info}/entry_points.txt +1 -0
  77. acryl_datahub_cloud/api/entity_versioning.py +0 -167
  78. {acryl_datahub_cloud-0.3.8.2rc4.dist-info → acryl_datahub_cloud-0.3.9rc1.dist-info}/top_level.txt +0 -0
@@ -301,7 +301,7 @@ class DataHubActionConfigClass(DictWrapper):
301
301
 
302
302
  @property
303
303
  def executorId(self) -> Union[None, str]:
304
- """The id of the executor to use to execute the automation. Defaults to 'default' (runs locally)"""
304
+ """The id of the executor pool to use to execute the automation. Defaults to 'default' (runs locally embedded)"""
305
305
  return self._inner_dict.get('executorId') # type: ignore
306
306
 
307
307
  @executorId.setter
@@ -720,6 +720,7 @@ class ActionRequestInfoClass(_Aspect):
720
720
  subResourceType: Union[None, str]=None,
721
721
  subResource: Union[None, str]=None,
722
722
  params: Union[None, "ActionRequestParamsClass"]=None,
723
+ description: Union[None, str]=None,
723
724
  dueDate: Union[None, int]=None,
724
725
  origin: Optional[Union[Union[str, "ActionRequestOriginClass"], None]]=None,
725
726
  inferenceMetadata: Union[None, "InferenceMetadataClass"]=None,
@@ -737,6 +738,7 @@ class ActionRequestInfoClass(_Aspect):
737
738
  self.params = params
738
739
  self.created = created
739
740
  self.createdBy = createdBy
741
+ self.description = description
740
742
  self.dueDate = dueDate
741
743
  if origin is None:
742
744
  # default: 'MANUAL'
@@ -757,6 +759,7 @@ class ActionRequestInfoClass(_Aspect):
757
759
  self.params = self.RECORD_SCHEMA.fields_dict["params"].default
758
760
  self.created = int()
759
761
  self.createdBy = str()
762
+ self.description = self.RECORD_SCHEMA.fields_dict["description"].default
760
763
  self.dueDate = self.RECORD_SCHEMA.fields_dict["dueDate"].default
761
764
  self.origin = self.RECORD_SCHEMA.fields_dict["origin"].default
762
765
  self.inferenceMetadata = self.RECORD_SCHEMA.fields_dict["inferenceMetadata"].default
@@ -774,7 +777,9 @@ class ActionRequestInfoClass(_Aspect):
774
777
 
775
778
  @property
776
779
  def assignedUsers(self) -> List[str]:
777
- """The users this action request is assigned to"""
780
+ """The users this action request is assigned to.
781
+ By default, action requests are assigned to Dataset Owners
782
+ and to anyone who has the View Proposals platform privilege."""
778
783
  return self._inner_dict.get('assignedUsers') # type: ignore
779
784
 
780
785
  @assignedUsers.setter
@@ -784,7 +789,9 @@ class ActionRequestInfoClass(_Aspect):
784
789
 
785
790
  @property
786
791
  def assignedGroups(self) -> List[str]:
787
- """The groups this action request is assigned to"""
792
+ """The groups this action request is assigned to
793
+ By default, action requests are assigned to Dataset Owners
794
+ and with anyone who has the View Proposals platform privilege."""
788
795
  return self._inner_dict.get('assignedGroups') # type: ignore
789
796
 
790
797
  @assignedGroups.setter
@@ -794,7 +801,8 @@ class ActionRequestInfoClass(_Aspect):
794
801
 
795
802
  @property
796
803
  def assignedRoles(self) -> Union[None, List[str]]:
797
- """The roles this action request is assigned to"""
804
+ """The roles this action request is assigned to
805
+ By default, action requests are assigned any roles that have the View Proposals platform privilege."""
798
806
  return self._inner_dict.get('assignedRoles') # type: ignore
799
807
 
800
808
  @assignedRoles.setter
@@ -834,7 +842,8 @@ class ActionRequestInfoClass(_Aspect):
834
842
 
835
843
  @property
836
844
  def subResource(self) -> Union[None, str]:
837
- """The sub-resource identifier that the action is associated with, for example 'fieldName'"""
845
+ """The sub-resource identifier that the action is associated with, for example 'fieldName'.
846
+ Currently, this is only used for Field Paths & schema fields."""
838
847
  return self._inner_dict.get('subResource') # type: ignore
839
848
 
840
849
  @subResource.setter
@@ -872,6 +881,17 @@ class ActionRequestInfoClass(_Aspect):
872
881
  self._inner_dict['createdBy'] = value
873
882
 
874
883
 
884
+ @property
885
+ def description(self) -> Union[None, str]:
886
+ """An optional description that can be added to the action request
887
+ to explain the intention behind it."""
888
+ return self._inner_dict.get('description') # type: ignore
889
+
890
+ @description.setter
891
+ def description(self, value: Union[None, str]) -> None:
892
+ self._inner_dict['description'] = value
893
+
894
+
875
895
  @property
876
896
  def dueDate(self) -> Union[None, int]:
877
897
  """The time at which the request is due"""
@@ -921,6 +941,9 @@ class ActionRequestParamsClass(DictWrapper):
921
941
  def __init__(self,
922
942
  glossaryTermProposal: Union[None, "GlossaryTermProposalClass"]=None,
923
943
  tagProposal: Union[None, "TagProposalClass"]=None,
944
+ domainProposal: Union[None, "DomainProposalClass"]=None,
945
+ ownerProposal: Union[None, "OwnerProposalClass"]=None,
946
+ structuredPropertyProposal: Union[None, "StructuredPropertyProposalClass"]=None,
924
947
  createGlossaryTermProposal: Union[None, "CreateGlossaryTermProposalClass"]=None,
925
948
  createGlossaryNodeProposal: Union[None, "CreateGlossaryNodeProposalClass"]=None,
926
949
  updateDescriptionProposal: Union[None, "DescriptionProposalClass"]=None,
@@ -930,6 +953,9 @@ class ActionRequestParamsClass(DictWrapper):
930
953
 
931
954
  self.glossaryTermProposal = glossaryTermProposal
932
955
  self.tagProposal = tagProposal
956
+ self.domainProposal = domainProposal
957
+ self.ownerProposal = ownerProposal
958
+ self.structuredPropertyProposal = structuredPropertyProposal
933
959
  self.createGlossaryTermProposal = createGlossaryTermProposal
934
960
  self.createGlossaryNodeProposal = createGlossaryNodeProposal
935
961
  self.updateDescriptionProposal = updateDescriptionProposal
@@ -938,6 +964,9 @@ class ActionRequestParamsClass(DictWrapper):
938
964
  def _restore_defaults(self) -> None:
939
965
  self.glossaryTermProposal = self.RECORD_SCHEMA.fields_dict["glossaryTermProposal"].default
940
966
  self.tagProposal = self.RECORD_SCHEMA.fields_dict["tagProposal"].default
967
+ self.domainProposal = self.RECORD_SCHEMA.fields_dict["domainProposal"].default
968
+ self.ownerProposal = self.RECORD_SCHEMA.fields_dict["ownerProposal"].default
969
+ self.structuredPropertyProposal = self.RECORD_SCHEMA.fields_dict["structuredPropertyProposal"].default
941
970
  self.createGlossaryTermProposal = self.RECORD_SCHEMA.fields_dict["createGlossaryTermProposal"].default
942
971
  self.createGlossaryNodeProposal = self.RECORD_SCHEMA.fields_dict["createGlossaryNodeProposal"].default
943
972
  self.updateDescriptionProposal = self.RECORD_SCHEMA.fields_dict["updateDescriptionProposal"].default
@@ -946,7 +975,8 @@ class ActionRequestParamsClass(DictWrapper):
946
975
 
947
976
  @property
948
977
  def glossaryTermProposal(self) -> Union[None, "GlossaryTermProposalClass"]:
949
- """An optional set of information specific to term proposals."""
978
+ """An optional set of information specific to term proposals.
979
+ TODO: Add validation that ensures that glossaryTerm or glossaryTerms field is provided, but not both."""
950
980
  return self._inner_dict.get('glossaryTermProposal') # type: ignore
951
981
 
952
982
  @glossaryTermProposal.setter
@@ -956,7 +986,8 @@ class ActionRequestParamsClass(DictWrapper):
956
986
 
957
987
  @property
958
988
  def tagProposal(self) -> Union[None, "TagProposalClass"]:
959
- """An optional set of information specific to tag proposals."""
989
+ """An optional set of information specific to tag proposals.
990
+ TODO: Add validation that ensures that tag or tagUrns field is provided, but not both."""
960
991
  return self._inner_dict.get('tagProposal') # type: ignore
961
992
 
962
993
  @tagProposal.setter
@@ -964,6 +995,36 @@ class ActionRequestParamsClass(DictWrapper):
964
995
  self._inner_dict['tagProposal'] = value
965
996
 
966
997
 
998
+ @property
999
+ def domainProposal(self) -> Union[None, "DomainProposalClass"]:
1000
+ """An optional set of information specific to domain proposals."""
1001
+ return self._inner_dict.get('domainProposal') # type: ignore
1002
+
1003
+ @domainProposal.setter
1004
+ def domainProposal(self, value: Union[None, "DomainProposalClass"]) -> None:
1005
+ self._inner_dict['domainProposal'] = value
1006
+
1007
+
1008
+ @property
1009
+ def ownerProposal(self) -> Union[None, "OwnerProposalClass"]:
1010
+ """An optional set of information specific to ownership proposals."""
1011
+ return self._inner_dict.get('ownerProposal') # type: ignore
1012
+
1013
+ @ownerProposal.setter
1014
+ def ownerProposal(self, value: Union[None, "OwnerProposalClass"]) -> None:
1015
+ self._inner_dict['ownerProposal'] = value
1016
+
1017
+
1018
+ @property
1019
+ def structuredPropertyProposal(self) -> Union[None, "StructuredPropertyProposalClass"]:
1020
+ """An optional set of information specific to structured property proposals."""
1021
+ return self._inner_dict.get('structuredPropertyProposal') # type: ignore
1022
+
1023
+ @structuredPropertyProposal.setter
1024
+ def structuredPropertyProposal(self, value: Union[None, "StructuredPropertyProposalClass"]) -> None:
1025
+ self._inner_dict['structuredPropertyProposal'] = value
1026
+
1027
+
967
1028
  @property
968
1029
  def createGlossaryTermProposal(self) -> Union[None, "CreateGlossaryTermProposalClass"]:
969
1030
  """An optional set of information specific to proposals for creating new Glossary Terms."""
@@ -1016,16 +1077,19 @@ class ActionRequestStatusClass(_Aspect):
1016
1077
  status: str,
1017
1078
  lastModified: "AuditStampClass",
1018
1079
  result: Union[None, str]=None,
1080
+ note: Union[None, str]=None,
1019
1081
  ):
1020
1082
  super().__init__()
1021
1083
 
1022
1084
  self.status = status
1023
1085
  self.result = result
1086
+ self.note = note
1024
1087
  self.lastModified = lastModified
1025
1088
 
1026
1089
  def _restore_defaults(self) -> None:
1027
1090
  self.status = str()
1028
1091
  self.result = self.RECORD_SCHEMA.fields_dict["result"].default
1092
+ self.note = self.RECORD_SCHEMA.fields_dict["note"].default
1029
1093
  self.lastModified = AuditStampClass._construct_with_defaults()
1030
1094
 
1031
1095
 
@@ -1049,6 +1113,17 @@ class ActionRequestStatusClass(_Aspect):
1049
1113
  self._inner_dict['result'] = value
1050
1114
 
1051
1115
 
1116
+ @property
1117
+ def note(self) -> Union[None, str]:
1118
+ """Optional note associated with the status.
1119
+ E.g. if the request is rejected, the reason for rejection. If the request is approved, the reason for approval."""
1120
+ return self._inner_dict.get('note') # type: ignore
1121
+
1122
+ @note.setter
1123
+ def note(self, value: Union[None, str]) -> None:
1124
+ self._inner_dict['note'] = value
1125
+
1126
+
1052
1127
  @property
1053
1128
  def lastModified(self) -> "AuditStampClass":
1054
1129
  """Audit stamp containing who last modified the status and when."""
@@ -1265,56 +1340,164 @@ class DescriptionProposalClass(DictWrapper):
1265
1340
  self._inner_dict['description'] = value
1266
1341
 
1267
1342
 
1343
+ class DomainProposalClass(DictWrapper):
1344
+ # No docs available.
1345
+
1346
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.DomainProposal")
1347
+ def __init__(self,
1348
+ domains: List[str],
1349
+ ):
1350
+ super().__init__()
1351
+
1352
+ self.domains = domains
1353
+
1354
+ def _restore_defaults(self) -> None:
1355
+ self.domains = list()
1356
+
1357
+
1358
+ @property
1359
+ def domains(self) -> List[str]:
1360
+ """The urns of the domain(s) being proposed. Currently, only 1 domain is supported per asset.
1361
+ If this changes in the future, this data modeling will suffice."""
1362
+ return self._inner_dict.get('domains') # type: ignore
1363
+
1364
+ @domains.setter
1365
+ def domains(self, value: List[str]) -> None:
1366
+ self._inner_dict['domains'] = value
1367
+
1368
+
1268
1369
  class GlossaryTermProposalClass(DictWrapper):
1269
1370
  # No docs available.
1270
1371
 
1271
1372
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.GlossaryTermProposal")
1272
1373
  def __init__(self,
1273
- glossaryTerm: str,
1374
+ glossaryTerm: Union[None, str]=None,
1375
+ glossaryTerms: Union[None, List[str]]=None,
1274
1376
  ):
1275
1377
  super().__init__()
1276
1378
 
1277
1379
  self.glossaryTerm = glossaryTerm
1380
+ self.glossaryTerms = glossaryTerms
1278
1381
 
1279
1382
  def _restore_defaults(self) -> None:
1280
- self.glossaryTerm = str()
1383
+ self.glossaryTerm = self.RECORD_SCHEMA.fields_dict["glossaryTerm"].default
1384
+ self.glossaryTerms = self.RECORD_SCHEMA.fields_dict["glossaryTerms"].default
1281
1385
 
1282
1386
 
1283
1387
  @property
1284
- def glossaryTerm(self) -> str:
1285
- """The urn of the glossary term being proposed."""
1388
+ def glossaryTerm(self) -> Union[None, str]:
1389
+ """This field is deprecated and will be removed in a future version. Use glossaryTerms instead.
1390
+ The urn of the glossary term being proposed."""
1286
1391
  return self._inner_dict.get('glossaryTerm') # type: ignore
1287
1392
 
1288
1393
  @glossaryTerm.setter
1289
- def glossaryTerm(self, value: str) -> None:
1394
+ def glossaryTerm(self, value: Union[None, str]) -> None:
1290
1395
  self._inner_dict['glossaryTerm'] = value
1291
1396
 
1292
1397
 
1398
+ @property
1399
+ def glossaryTerms(self) -> Union[None, List[str]]:
1400
+ """The urns of the glossary terms being proposed.
1401
+ Use this field over glossaryTerm."""
1402
+ return self._inner_dict.get('glossaryTerms') # type: ignore
1403
+
1404
+ @glossaryTerms.setter
1405
+ def glossaryTerms(self, value: Union[None, List[str]]) -> None:
1406
+ self._inner_dict['glossaryTerms'] = value
1407
+
1408
+
1409
+ class OwnerProposalClass(DictWrapper):
1410
+ # No docs available.
1411
+
1412
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.OwnerProposal")
1413
+ def __init__(self,
1414
+ owners: Union[None, List["OwnerClass"]]=None,
1415
+ ):
1416
+ super().__init__()
1417
+
1418
+ self.owners = owners
1419
+
1420
+ def _restore_defaults(self) -> None:
1421
+ self.owners = self.RECORD_SCHEMA.fields_dict["owners"].default
1422
+
1423
+
1424
+ @property
1425
+ def owners(self) -> Union[None, List["OwnerClass"]]:
1426
+ """The urns of the owner(s) being proposed."""
1427
+ return self._inner_dict.get('owners') # type: ignore
1428
+
1429
+ @owners.setter
1430
+ def owners(self, value: Union[None, List["OwnerClass"]]) -> None:
1431
+ self._inner_dict['owners'] = value
1432
+
1433
+
1434
+ class StructuredPropertyProposalClass(DictWrapper):
1435
+ # No docs available.
1436
+
1437
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.StructuredPropertyProposal")
1438
+ def __init__(self,
1439
+ structuredPropertyValues: Union[None, List["StructuredPropertyValueAssignmentClass"]]=None,
1440
+ ):
1441
+ super().__init__()
1442
+
1443
+ self.structuredPropertyValues = structuredPropertyValues
1444
+
1445
+ def _restore_defaults(self) -> None:
1446
+ self.structuredPropertyValues = self.RECORD_SCHEMA.fields_dict["structuredPropertyValues"].default
1447
+
1448
+
1449
+ @property
1450
+ def structuredPropertyValues(self) -> Union[None, List["StructuredPropertyValueAssignmentClass"]]:
1451
+ """The urns of the domain(s) being proposed. Currently, only 1 domain is supported per asset.
1452
+ If this changes in the future, the data model will be ready.
1453
+ TODO: Decide if indexing the value would also be useful."""
1454
+ return self._inner_dict.get('structuredPropertyValues') # type: ignore
1455
+
1456
+ @structuredPropertyValues.setter
1457
+ def structuredPropertyValues(self, value: Union[None, List["StructuredPropertyValueAssignmentClass"]]) -> None:
1458
+ self._inner_dict['structuredPropertyValues'] = value
1459
+
1460
+
1293
1461
  class TagProposalClass(DictWrapper):
1294
1462
  # No docs available.
1295
1463
 
1296
1464
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.actionrequest.TagProposal")
1297
1465
  def __init__(self,
1298
- tag: str,
1466
+ tag: Union[None, str]=None,
1467
+ tags: Union[None, List[str]]=None,
1299
1468
  ):
1300
1469
  super().__init__()
1301
1470
 
1302
1471
  self.tag = tag
1472
+ self.tags = tags
1303
1473
 
1304
1474
  def _restore_defaults(self) -> None:
1305
- self.tag = str()
1475
+ self.tag = self.RECORD_SCHEMA.fields_dict["tag"].default
1476
+ self.tags = self.RECORD_SCHEMA.fields_dict["tags"].default
1306
1477
 
1307
1478
 
1308
1479
  @property
1309
- def tag(self) -> str:
1310
- """The urn of the tag being proposed."""
1480
+ def tag(self) -> Union[None, str]:
1481
+ """This field is deprecated and will be removed in a future version. Use tags instead.
1482
+ The urn of the tag being proposed."""
1311
1483
  return self._inner_dict.get('tag') # type: ignore
1312
1484
 
1313
1485
  @tag.setter
1314
- def tag(self, value: str) -> None:
1486
+ def tag(self, value: Union[None, str]) -> None:
1315
1487
  self._inner_dict['tag'] = value
1316
1488
 
1317
1489
 
1490
+ @property
1491
+ def tags(self) -> Union[None, List[str]]:
1492
+ """The urns of the glossary terms being proposed.
1493
+ Use this field over glossaryTerm."""
1494
+ return self._inner_dict.get('tags') # type: ignore
1495
+
1496
+ @tags.setter
1497
+ def tags(self, value: Union[None, List[str]]) -> None:
1498
+ self._inner_dict['tags'] = value
1499
+
1500
+
1318
1501
  class AiInferenceMetadataClass(_Aspect):
1319
1502
  """AI Inference Metadata of various types."""
1320
1503
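Taken together, the hunks above extend ActionRequestParamsClass with domain, owner, and structured-property proposals and deprecate the scalar tag / glossaryTerm fields in favor of the list-valued tags / glossaryTerms. A construction sketch using the generated signatures shown above; OwnerClass and OwnershipTypeClass are assumed to keep the shapes they have in the open-source schema_classes module, and all urns are placeholders:

    from acryl_datahub_cloud.metadata.schema_classes import (
        ActionRequestParamsClass,
        DomainProposalClass,
        OwnerClass,
        OwnerProposalClass,
        OwnershipTypeClass,
        TagProposalClass,
    )

    params = ActionRequestParamsClass(
        # Prefer the new list-valued `tags` over the deprecated scalar `tag`.
        tagProposal=TagProposalClass(tags=["urn:li:tag:pii", "urn:li:tag:tier1"]),
        domainProposal=DomainProposalClass(domains=["urn:li:domain:finance"]),
        ownerProposal=OwnerProposalClass(
            owners=[OwnerClass(owner="urn:li:corpuser:jdoe", type=OwnershipTypeClass.TECHNICAL_OWNER)]
        ),
    )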
 
@@ -3296,13 +3479,15 @@ class AssertionSourceTypeClass(object):
3296
3479
  # No docs available.
3297
3480
 
3298
3481
  NATIVE = "NATIVE"
3299
- """The assertion was defined natively on DataHub by a user."""
3482
+ """The assertion was defined natively on DataHub by a user.
3483
+ DataHub Cloud only"""
3300
3484
 
3301
3485
  EXTERNAL = "EXTERNAL"
3302
3486
  """The assertion was defined and managed externally of DataHub."""
3303
3487
 
3304
3488
  INFERRED = "INFERRED"
3305
- """The assertion was inferred, e.g. from offline AI / ML models."""
3489
+ """The assertion was inferred, e.g. from offline AI / ML models.
3490
+ DataHub Cloud only"""
3306
3491
 
3307
3492
 
3308
3493
 
@@ -8759,7 +8944,7 @@ class OperationTypeClass(object):
8759
8944
  """Asset was dropped"""
8760
8945
 
8761
8946
  CUSTOM = "CUSTOM"
8762
- """Custom asset operation"""
8947
+ """Custom asset operation. If this is set, ensure customOperationType is filled out."""
8763
8948
 
8764
8949
  UNKNOWN = "UNKNOWN"
8765
8950
 
@@ -13201,6 +13386,113 @@ class DataPlatformInstancePropertiesClass(_Aspect):
13201
13386
  self._inner_dict['description'] = value
13202
13387
 
13203
13388
 
13389
+ class IcebergWarehouseInfoClass(_Aspect):
13390
+ """An Iceberg warehouse location and credentails whose read/writes are governed by datahub catalog."""
13391
+
13392
+
13393
+ ASPECT_NAME = 'icebergWarehouseInfo'
13394
+ ASPECT_INFO = {}
13395
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.dataplatforminstance.IcebergWarehouseInfo")
13396
+
13397
+ def __init__(self,
13398
+ dataRoot: str,
13399
+ clientId: str,
13400
+ clientSecret: str,
13401
+ region: str,
13402
+ env: Union[str, "FabricTypeClass"],
13403
+ role: Union[None, str]=None,
13404
+ tempCredentialExpirationSeconds: Union[None, int]=None,
13405
+ ):
13406
+ super().__init__()
13407
+
13408
+ self.dataRoot = dataRoot
13409
+ self.clientId = clientId
13410
+ self.clientSecret = clientSecret
13411
+ self.region = region
13412
+ self.role = role
13413
+ self.tempCredentialExpirationSeconds = tempCredentialExpirationSeconds
13414
+ self.env = env
13415
+
13416
+ def _restore_defaults(self) -> None:
13417
+ self.dataRoot = str()
13418
+ self.clientId = str()
13419
+ self.clientSecret = str()
13420
+ self.region = str()
13421
+ self.role = self.RECORD_SCHEMA.fields_dict["role"].default
13422
+ self.tempCredentialExpirationSeconds = self.RECORD_SCHEMA.fields_dict["tempCredentialExpirationSeconds"].default
13423
+ self.env = FabricTypeClass.DEV
13424
+
13425
+
13426
+ @property
13427
+ def dataRoot(self) -> str:
13428
+ """Path of the root for the backing store of the tables in the warehouse."""
13429
+ return self._inner_dict.get('dataRoot') # type: ignore
13430
+
13431
+ @dataRoot.setter
13432
+ def dataRoot(self, value: str) -> None:
13433
+ self._inner_dict['dataRoot'] = value
13434
+
13435
+
13436
+ @property
13437
+ def clientId(self) -> str:
13438
+ """clientId to be used to authenticate with storage hosting this warehouse"""
13439
+ return self._inner_dict.get('clientId') # type: ignore
13440
+
13441
+ @clientId.setter
13442
+ def clientId(self, value: str) -> None:
13443
+ self._inner_dict['clientId'] = value
13444
+
13445
+
13446
+ @property
13447
+ def clientSecret(self) -> str:
13448
+ """client secret to authenticate with storage hosting this warehouse"""
13449
+ return self._inner_dict.get('clientSecret') # type: ignore
13450
+
13451
+ @clientSecret.setter
13452
+ def clientSecret(self, value: str) -> None:
13453
+ self._inner_dict['clientSecret'] = value
13454
+
13455
+
13456
+ @property
13457
+ def region(self) -> str:
13458
+ """region where the warehouse is located."""
13459
+ return self._inner_dict.get('region') # type: ignore
13460
+
13461
+ @region.setter
13462
+ def region(self, value: str) -> None:
13463
+ self._inner_dict['region'] = value
13464
+
13465
+
13466
+ @property
13467
+ def role(self) -> Union[None, str]:
13468
+ # No docs available.
13469
+ return self._inner_dict.get('role') # type: ignore
13470
+
13471
+ @role.setter
13472
+ def role(self, value: Union[None, str]) -> None:
13473
+ self._inner_dict['role'] = value
13474
+
13475
+
13476
+ @property
13477
+ def tempCredentialExpirationSeconds(self) -> Union[None, int]:
13478
+ # No docs available.
13479
+ return self._inner_dict.get('tempCredentialExpirationSeconds') # type: ignore
13480
+
13481
+ @tempCredentialExpirationSeconds.setter
13482
+ def tempCredentialExpirationSeconds(self, value: Union[None, int]) -> None:
13483
+ self._inner_dict['tempCredentialExpirationSeconds'] = value
13484
+
13485
+
13486
+ @property
13487
+ def env(self) -> Union[str, "FabricTypeClass"]:
13488
+ # No docs available.
13489
+ return self._inner_dict.get('env') # type: ignore
13490
+
13491
+ @env.setter
13492
+ def env(self, value: Union[str, "FabricTypeClass"]) -> None:
13493
+ self._inner_dict['env'] = value
13494
+
13495
+
13204
13496
  class DataProcessInfoClass(_Aspect):
13205
13497
  """The inputs and outputs of this data process"""
13206
13498
 
@@ -13264,7 +13556,7 @@ class DataProcessInstanceInputClass(_Aspect):
13264
13556
 
13265
13557
  @property
13266
13558
  def inputs(self) -> List[str]:
13267
- """Input datasets to be consumed"""
13559
+ """Input assets consumed"""
13268
13560
  return self._inner_dict.get('inputs') # type: ignore
13269
13561
 
13270
13562
  @inputs.setter
@@ -15186,6 +15478,49 @@ class HistogramClass(DictWrapper):
15186
15478
  self._inner_dict['heights'] = value
15187
15479
 
15188
15480
 
15481
+ class IcebergCatalogInfoClass(_Aspect):
15482
+ """Iceberg Catalog metadata associated with an Iceberg table/view"""
15483
+
15484
+
15485
+ ASPECT_NAME = 'icebergCatalogInfo'
15486
+ ASPECT_INFO = {}
15487
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.dataset.IcebergCatalogInfo")
15488
+
15489
+ def __init__(self,
15490
+ metadataPointer: Union[None, str]=None,
15491
+ view: Union[None, bool]=None,
15492
+ ):
15493
+ super().__init__()
15494
+
15495
+ self.metadataPointer = metadataPointer
15496
+ self.view = view
15497
+
15498
+ def _restore_defaults(self) -> None:
15499
+ self.metadataPointer = self.RECORD_SCHEMA.fields_dict["metadataPointer"].default
15500
+ self.view = self.RECORD_SCHEMA.fields_dict["view"].default
15501
+
15502
+
15503
+ @property
15504
+ def metadataPointer(self) -> Union[None, str]:
15505
+ """When Datahub is the REST Catalog for an Iceberg Table, stores the current metadata pointer.
15506
+ If the Iceberg table is managed by an external catalog, the metadata pointer is not set."""
15507
+ return self._inner_dict.get('metadataPointer') # type: ignore
15508
+
15509
+ @metadataPointer.setter
15510
+ def metadataPointer(self, value: Union[None, str]) -> None:
15511
+ self._inner_dict['metadataPointer'] = value
15512
+
15513
+
15514
+ @property
15515
+ def view(self) -> Union[None, bool]:
15516
+ # No docs available.
15517
+ return self._inner_dict.get('view') # type: ignore
15518
+
15519
+ @view.setter
15520
+ def view(self, value: Union[None, bool]) -> None:
15521
+ self._inner_dict['view'] = value
15522
+
15523
+
15189
15524
  class PartitionSummaryClass(DictWrapper):
15190
15525
  """Defines how the data is partitioned"""
15191
15526
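IcebergCatalogInfo is a new dataset aspect recording whether DataHub itself acts as the table's REST catalog. A minimal sketch with a placeholder metadata location:

    from acryl_datahub_cloud.metadata.schema_classes import IcebergCatalogInfoClass

    catalog_info = IcebergCatalogInfoClass(
        # Set only when DataHub is the REST catalog for the table.
        metadataPointer="s3://example-bucket/iceberg/db/orders/metadata/00003-abc.metadata.json",
        view=False,
    )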
 
@@ -16969,7 +17304,7 @@ class ExecutionRequestInputClass(_Aspect):
16969
17304
 
16970
17305
  @property
16971
17306
  def executorId(self) -> str:
16972
- """Advanced: specify a specific executor to route the request to. If none is provided, a "default" executor is used."""
17307
+ """Advanced: specify a specific executor pool to route the request to. If none is provided, a "default" embedded executor is used."""
16973
17308
  return self._inner_dict.get('executorId') # type: ignore
16974
17309
 
16975
17310
  @executorId.setter
@@ -17148,7 +17483,7 @@ class ExecutionRequestSignalClass(_Aspect):
17148
17483
 
17149
17484
  @property
17150
17485
  def executorId(self) -> Union[None, str]:
17151
- """Advanced: specify a specific executor to route the request to. If none is provided, a "default" executor is used."""
17486
+ """Advanced: specify a specific executor pool to route the request to. If none is provided, a default embedded executor is used."""
17152
17487
  return self._inner_dict.get('executorId') # type: ignore
17153
17488
 
17154
17489
  @executorId.setter
@@ -17279,7 +17614,7 @@ class RemoteExecutorStatusClass(_Aspect):
17279
17614
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.executor.RemoteExecutorStatus")
17280
17615
 
17281
17616
  def __init__(self,
17282
- poolName: str,
17617
+ executorPoolId: str,
17283
17618
  executorReleaseVersion: str,
17284
17619
  executorAddress: str,
17285
17620
  executorHostname: str,
@@ -17293,7 +17628,7 @@ class RemoteExecutorStatusClass(_Aspect):
17293
17628
  ):
17294
17629
  super().__init__()
17295
17630
 
17296
- self.poolName = poolName
17631
+ self.executorPoolId = executorPoolId
17297
17632
  self.executorReleaseVersion = executorReleaseVersion
17298
17633
  self.executorAddress = executorAddress
17299
17634
  self.executorHostname = executorHostname
@@ -17326,7 +17661,7 @@ class RemoteExecutorStatusClass(_Aspect):
17326
17661
  self.reportedAt = reportedAt
17327
17662
 
17328
17663
  def _restore_defaults(self) -> None:
17329
- self.poolName = str()
17664
+ self.executorPoolId = str()
17330
17665
  self.executorReleaseVersion = str()
17331
17666
  self.executorAddress = str()
17332
17667
  self.executorHostname = str()
@@ -17340,13 +17675,13 @@ class RemoteExecutorStatusClass(_Aspect):
17340
17675
 
17341
17676
 
17342
17677
  @property
17343
- def poolName(self) -> str:
17344
- """References the 'name' defined in RemoteExecutorPoolKey"""
17345
- return self._inner_dict.get('poolName') # type: ignore
17678
+ def executorPoolId(self) -> str:
17679
+ """References the 'id' defined in RemoteExecutorPoolKey"""
17680
+ return self._inner_dict.get('executorPoolId') # type: ignore
17346
17681
 
17347
- @poolName.setter
17348
- def poolName(self, value: str) -> None:
17349
- self._inner_dict['poolName'] = value
17682
+ @executorPoolId.setter
17683
+ def executorPoolId(self, value: str) -> None:
17684
+ self._inner_dict['executorPoolId'] = value
17350
17685
 
17351
17686
 
17352
17687
  @property
@@ -17391,7 +17726,8 @@ class RemoteExecutorStatusClass(_Aspect):
17391
17726
 
17392
17727
  @property
17393
17728
  def executorExpired(self) -> bool:
17394
- """Flag indicating whether remote executor status record is stale."""
17729
+ """Flag indicating whether remote executor status record is stale.
17730
+ This means the executor is no longer sending heartbeats and is considered dead."""
17395
17731
  return self._inner_dict.get('executorExpired') # type: ignore
17396
17732
 
17397
17733
  @executorExpired.setter
@@ -17401,7 +17737,7 @@ class RemoteExecutorStatusClass(_Aspect):
17401
17737
 
17402
17738
  @property
17403
17739
  def executorStopped(self) -> bool:
17404
- """Flag indicating whether remote executor is stopped."""
17740
+ """Flag indicating whether remote executor is stopped, and properly reported its termination."""
17405
17741
  return self._inner_dict.get('executorStopped') # type: ignore
17406
17742
 
17407
17743
  @executorStopped.setter
@@ -17411,7 +17747,7 @@ class RemoteExecutorStatusClass(_Aspect):
17411
17747
 
17412
17748
  @property
17413
17749
  def executorEmbedded(self) -> bool:
17414
- """Flag indicating whether remote executor is embedded executor"""
17750
+ """Flag indicating whether remote executor is embedded into the coordinator pod"""
17415
17751
  return self._inner_dict.get('executorEmbedded') # type: ignore
17416
17752
 
17417
17753
  @executorEmbedded.setter
@@ -17450,6 +17786,36 @@ class RemoteExecutorStatusClass(_Aspect):
17450
17786
  self._inner_dict['reportedAt'] = value
17451
17787
 
17452
17788
 
17789
+ class RemoteExecutorPoolGlobalConfigClass(_Aspect):
17790
+ """Global singleton storing configs for remote executor pools."""
17791
+
17792
+
17793
+ ASPECT_NAME = 'dataHubRemoteExecutorPoolGlobalConfig'
17794
+ ASPECT_INFO = {}
17795
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.executorglobalconfig.RemoteExecutorPoolGlobalConfig")
17796
+
17797
+ def __init__(self,
17798
+ defaultExecutorPoolId: str,
17799
+ ):
17800
+ super().__init__()
17801
+
17802
+ self.defaultExecutorPoolId = defaultExecutorPoolId
17803
+
17804
+ def _restore_defaults(self) -> None:
17805
+ self.defaultExecutorPoolId = str()
17806
+
17807
+
17808
+ @property
17809
+ def defaultExecutorPoolId(self) -> str:
17810
+ """The default pool to use for tasks that require remote executors.
17811
+ References 'id' in com.linkedin.pegasus2avro.metadata.key.RemoteExecutorPoolKey"""
17812
+ return self._inner_dict.get('defaultExecutorPoolId') # type: ignore
17813
+
17814
+ @defaultExecutorPoolId.setter
17815
+ def defaultExecutorPoolId(self, value: str) -> None:
17816
+ self._inner_dict['defaultExecutorPoolId'] = value
17817
+
17818
+
17453
17819
  class RemoteExecutorPoolInfoClass(_Aspect):
17454
17820
  # No docs available.
17455
17821
 
@@ -17460,13 +17826,28 @@ class RemoteExecutorPoolInfoClass(_Aspect):
17460
17826
 
17461
17827
  def __init__(self,
17462
17828
  createdAt: int,
17829
+ creator: Union[None, str]=None,
17830
+ description: Union[None, str]=None,
17831
+ queueUrl: Union[None, str]=None,
17832
+ isEmbedded: Union[None, bool]=None,
17833
+ state: Union[None, "RemoteExecutorPoolStateClass"]=None,
17463
17834
  ):
17464
17835
  super().__init__()
17465
17836
 
17466
17837
  self.createdAt = createdAt
17838
+ self.creator = creator
17839
+ self.description = description
17840
+ self.queueUrl = queueUrl
17841
+ self.isEmbedded = isEmbedded
17842
+ self.state = state
17467
17843
 
17468
17844
  def _restore_defaults(self) -> None:
17469
17845
  self.createdAt = int()
17846
+ self.creator = self.RECORD_SCHEMA.fields_dict["creator"].default
17847
+ self.description = self.RECORD_SCHEMA.fields_dict["description"].default
17848
+ self.queueUrl = self.RECORD_SCHEMA.fields_dict["queueUrl"].default
17849
+ self.isEmbedded = self.RECORD_SCHEMA.fields_dict["isEmbedded"].default
17850
+ self.state = self.RECORD_SCHEMA.fields_dict["state"].default
17470
17851
 
17471
17852
 
17472
17853
  @property
@@ -17479,6 +17860,111 @@ class RemoteExecutorPoolInfoClass(_Aspect):
17479
17860
  self._inner_dict['createdAt'] = value
17480
17861
 
17481
17862
 
17863
+ @property
17864
+ def creator(self) -> Union[None, str]:
17865
+ """The creator of this pool"""
17866
+ return self._inner_dict.get('creator') # type: ignore
17867
+
17868
+ @creator.setter
17869
+ def creator(self, value: Union[None, str]) -> None:
17870
+ self._inner_dict['creator'] = value
17871
+
17872
+
17873
+ @property
17874
+ def description(self) -> Union[None, str]:
17875
+ """A description for this pool"""
17876
+ return self._inner_dict.get('description') # type: ignore
17877
+
17878
+ @description.setter
17879
+ def description(self, value: Union[None, str]) -> None:
17880
+ self._inner_dict['description'] = value
17881
+
17882
+
17883
+ @property
17884
+ def queueUrl(self) -> Union[None, str]:
17885
+ """The url to the task queue for this pool. I.e., SQS queue url."""
17886
+ return self._inner_dict.get('queueUrl') # type: ignore
17887
+
17888
+ @queueUrl.setter
17889
+ def queueUrl(self, value: Union[None, str]) -> None:
17890
+ self._inner_dict['queueUrl'] = value
17891
+
17892
+
17893
+ @property
17894
+ def isEmbedded(self) -> Union[None, bool]:
17895
+ """Only set true if this is the pool embedded within the DataHub Cloud deployment"""
17896
+ return self._inner_dict.get('isEmbedded') # type: ignore
17897
+
17898
+ @isEmbedded.setter
17899
+ def isEmbedded(self, value: Union[None, bool]) -> None:
17900
+ self._inner_dict['isEmbedded'] = value
17901
+
17902
+
17903
+ @property
17904
+ def state(self) -> Union[None, "RemoteExecutorPoolStateClass"]:
17905
+ """The status of the remote executor pool"""
17906
+ return self._inner_dict.get('state') # type: ignore
17907
+
17908
+ @state.setter
17909
+ def state(self, value: Union[None, "RemoteExecutorPoolStateClass"]) -> None:
17910
+ self._inner_dict['state'] = value
17911
+
17912
+
17913
+ class RemoteExecutorPoolStateClass(DictWrapper):
17914
+ # No docs available.
17915
+
17916
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolState")
17917
+ def __init__(self,
17918
+ status: Union[str, "RemoteExecutorPoolStatusClass"],
17919
+ message: Union[None, str]=None,
17920
+ ):
17921
+ super().__init__()
17922
+
17923
+ self.status = status
17924
+ self.message = message
17925
+
17926
+ def _restore_defaults(self) -> None:
17927
+ self.status = RemoteExecutorPoolStatusClass.PROVISIONING_PENDING
17928
+ self.message = self.RECORD_SCHEMA.fields_dict["message"].default
17929
+
17930
+
17931
+ @property
17932
+ def status(self) -> Union[str, "RemoteExecutorPoolStatusClass"]:
17933
+ """The status of the remote executor pool"""
17934
+ return self._inner_dict.get('status') # type: ignore
17935
+
17936
+ @status.setter
17937
+ def status(self, value: Union[str, "RemoteExecutorPoolStatusClass"]) -> None:
17938
+ self._inner_dict['status'] = value
17939
+
17940
+
17941
+ @property
17942
+ def message(self) -> Union[None, str]:
17943
+ """The message associated with the status. I.e., an error message explaining failure."""
17944
+ return self._inner_dict.get('message') # type: ignore
17945
+
17946
+ @message.setter
17947
+ def message(self, value: Union[None, str]) -> None:
17948
+ self._inner_dict['message'] = value
17949
+
17950
+
17951
+ class RemoteExecutorPoolStatusClass(object):
17952
+ # No docs available.
17953
+
17954
+ PROVISIONING_PENDING = "PROVISIONING_PENDING"
17955
+ """The pool is pending provisioning. Default state on creation."""
17956
+
17957
+ PROVISIONING_IN_PROGRESS = "PROVISIONING_IN_PROGRESS"
17958
+ """The pool has been picked up by DataHub and is in the process of being provisioned."""
17959
+
17960
+ PROVISIONING_FAILED = "PROVISIONING_FAILED"
17961
+ """The pool has failed to be provisioned."""
17962
+
17963
+ READY = "READY"
17964
+ """The pool has been successfully provisioned and is ready to accept tasks."""
17965
+
17966
+
17967
+
17482
17968
  class DomainParamsClass(DictWrapper):
17483
17969
  """Information specific to glossary terms prompts."""
17484
17970
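The executor-pool hunks above introduce pool metadata, a provisioning state machine, and a global config that names the default pool. A construction sketch with placeholder identifiers and queue URL:

    import time

    from acryl_datahub_cloud.metadata.schema_classes import (
        RemoteExecutorPoolGlobalConfigClass,
        RemoteExecutorPoolInfoClass,
        RemoteExecutorPoolStateClass,
        RemoteExecutorPoolStatusClass,
    )

    pool_info = RemoteExecutorPoolInfoClass(
        createdAt=int(time.time() * 1000),
        creator="urn:li:corpuser:admin",
        description="Remote ingestion pool for the EU VPC",
        queueUrl="https://sqs.eu-west-1.amazonaws.com/123456789012/example-queue",
        isEmbedded=False,
        state=RemoteExecutorPoolStateClass(status=RemoteExecutorPoolStatusClass.PROVISIONING_PENDING),
    )

    # Singleton config: tasks that do not specify an executorId are routed to this pool.
    global_config = RemoteExecutorPoolGlobalConfigClass(defaultExecutorPoolId="eu-ingestion-pool")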
 
@@ -20662,7 +21148,7 @@ class DataHubIngestionSourceConfigClass(DictWrapper):
20662
21148
 
20663
21149
  @property
20664
21150
  def executorId(self) -> Union[None, str]:
20665
- """The id of the executor to use to execute the ingestion run"""
21151
+ """The id of the executor pool to use to execute the ingestion run"""
20666
21152
  return self._inner_dict.get('executorId') # type: ignore
20667
21153
 
20668
21154
  @executorId.setter
@@ -21015,7 +21501,7 @@ class ChartKeyClass(_Aspect):
21015
21501
 
21016
21502
 
21017
21503
  ASPECT_NAME = 'chartKey'
21018
- ASPECT_INFO = {'keyForEntity': 'chart', 'entityCategory': 'core', 'entityAspects': ['chartInfo', 'editableChartProperties', 'chartQuery', 'inputFields', 'usageFeatures', 'lineageFeatures', 'chartUsageStatistics', 'embed', 'proposals', 'browsePaths', 'domains', 'container', 'deprecation', 'ownership', 'status', 'institutionalMemory', 'dataPlatformInstance', 'globalTags', 'glossaryTerms', 'browsePathsV2', 'subTypes', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'share', 'origin', 'documentation']}
21504
+ ASPECT_INFO = {'keyForEntity': 'chart', 'entityCategory': 'core', 'entityAspects': ['chartInfo', 'editableChartProperties', 'chartQuery', 'inputFields', 'chartUsageStatistics', 'embed', 'browsePaths', 'domains', 'container', 'deprecation', 'ownership', 'status', 'institutionalMemory', 'dataPlatformInstance', 'globalTags', 'glossaryTerms', 'browsePathsV2', 'subTypes', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'usageFeatures', 'lineageFeatures', 'proposals', 'share', 'origin', 'documentation']}
21019
21505
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.ChartKey")
21020
21506
 
21021
21507
  def __init__(self,
@@ -21086,7 +21572,7 @@ class ContainerKeyClass(_Aspect):
21086
21572
 
21087
21573
 
21088
21574
  ASPECT_NAME = 'containerKey'
21089
- ASPECT_INFO = {'keyForEntity': 'container', 'entityCategory': 'core', 'entityAspects': ['containerProperties', 'editableContainerProperties', 'dataPlatformInstance', 'subTypes', 'ownership', 'deprecation', 'container', 'globalTags', 'glossaryTerms', 'institutionalMemory', 'browsePaths', 'status', 'domains', 'proposals', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'access', 'share', 'origin', 'documentation'], 'entityDoc': 'A container of related data assets.'}
21575
+ ASPECT_INFO = {'keyForEntity': 'container', 'entityCategory': 'core', 'entityAspects': ['containerProperties', 'editableContainerProperties', 'dataPlatformInstance', 'subTypes', 'ownership', 'deprecation', 'container', 'globalTags', 'glossaryTerms', 'institutionalMemory', 'browsePaths', 'status', 'domains', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'access', 'proposals', 'share', 'origin', 'documentation'], 'entityDoc': 'A container of related data assets.'}
21090
21576
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.ContainerKey")
21091
21577
 
21092
21578
  def __init__(self,
@@ -21115,7 +21601,7 @@ class CorpGroupKeyClass(_Aspect):
21115
21601
 
21116
21602
 
21117
21603
  ASPECT_NAME = 'corpGroupKey'
21118
- ASPECT_INFO = {'keyForEntity': 'corpGroup', 'entityCategory': '_unset_', 'entityAspects': ['corpGroupInfo', 'corpGroupEditableInfo', 'globalTags', 'ownership', 'status', 'origin', 'corpGroupSettings', 'roleMembership', 'structuredProperties', 'forms', 'testResults', 'share'], 'entityDoc': 'CorpGroup represents an identity of a group of users in the enterprise.'}
21604
+ ASPECT_INFO = {'keyForEntity': 'corpGroup', 'entityCategory': '_unset_', 'entityAspects': ['corpGroupInfo', 'corpGroupEditableInfo', 'globalTags', 'ownership', 'status', 'origin', 'roleMembership', 'structuredProperties', 'forms', 'testResults', 'corpGroupSettings', 'share'], 'entityDoc': 'CorpGroup represents an identity of a group of users in the enterprise.'}
21119
21605
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.CorpGroupKey")
21120
21606
 
21121
21607
  def __init__(self,
@@ -21173,7 +21659,7 @@ class DashboardKeyClass(_Aspect):
21173
21659
 
21174
21660
 
21175
21661
  ASPECT_NAME = 'dashboardKey'
21176
- ASPECT_INFO = {'keyForEntity': 'dashboard', 'entityCategory': '_unset_', 'entityAspects': ['domains', 'container', 'deprecation', 'dashboardUsageStatistics', 'inputFields', 'usageFeatures', 'lineageFeatures', 'subTypes', 'embed', 'proposals', 'dashboardInfo', 'editableDashboardProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'share', 'origin', 'documentation']}
21662
+ ASPECT_INFO = {'keyForEntity': 'dashboard', 'entityCategory': '_unset_', 'entityAspects': ['domains', 'container', 'deprecation', 'dashboardUsageStatistics', 'inputFields', 'subTypes', 'embed', 'dashboardInfo', 'editableDashboardProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'testResults', 'usageFeatures', 'lineageFeatures', 'proposals', 'share', 'origin', 'documentation']}
21177
21663
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DashboardKey")
21178
21664
 
21179
21665
  def __init__(self,
@@ -21244,7 +21730,7 @@ class DataFlowKeyClass(_Aspect):
21244
21730
 
21245
21731
 
21246
21732
  ASPECT_NAME = 'dataFlowKey'
21247
- ASPECT_INFO = {'keyForEntity': 'dataFlow', 'entityCategory': 'core', 'entityAspects': ['domains', 'deprecation', 'versionInfo', 'proposals', 'dataFlowInfo', 'editableDataFlowProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'subTypes', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation']}
21733
+ ASPECT_INFO = {'keyForEntity': 'dataFlow', 'entityCategory': 'core', 'entityAspects': ['domains', 'deprecation', 'versionInfo', 'dataFlowInfo', 'editableDataFlowProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'incidentsSummary', 'forms', 'subTypes', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
21248
21734
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataFlowKey")
21249
21735
 
21250
21736
  def __init__(self,
@@ -21357,7 +21843,7 @@ class DataHubConnectionKeyClass(_Aspect):
21357
21843
 
21358
21844
 
21359
21845
  ASPECT_NAME = 'dataHubConnectionKey'
21360
- ASPECT_INFO = {'keyForEntity': 'dataHubConnection', 'entityCategory': 'internal', 'entityAspects': ['dataHubConnectionDetails', 'dataPlatformInstance']}
21846
+ ASPECT_INFO = {'keyForEntity': 'dataHubConnection', 'entityCategory': 'internal', 'entityAspects': ['dataHubConnectionDetails', 'dataPlatformInstance', 'status']}
21361
21847
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataHubConnectionKey")
21362
21848
 
21363
21849
  def __init__(self,
@@ -21410,6 +21896,35 @@ class DataHubIngestionSourceKeyClass(_Aspect):
21410
21896
  self._inner_dict['id'] = value
21411
21897
 
21412
21898
 
21899
+ class DataHubMetricCubeKeyClass(_Aspect):
21900
+ """Key for a DataHub Metric Cube, e.g. an internal metric."""
21901
+
21902
+
21903
+ ASPECT_NAME = 'dataHubMetricCubeKey'
21904
+ ASPECT_INFO = {'keyForEntity': 'dataHubMetricCube', 'entityCategory': 'internal', 'entityAspects': ['dataHubMetricCubeDefinition', 'dataHubMetricCubeEvent']}
21905
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataHubMetricCubeKey")
21906
+
21907
+ def __init__(self,
21908
+ id: str,
21909
+ ):
21910
+ super().__init__()
21911
+
21912
+ self.id = id
21913
+
21914
+ def _restore_defaults(self) -> None:
21915
+ self.id = str()
21916
+
21917
+
21918
+ @property
21919
+ def id(self) -> str:
21920
+ """Unique id for the cube type."""
21921
+ return self._inner_dict.get('id') # type: ignore
21922
+
21923
+ @id.setter
21924
+ def id(self, value: str) -> None:
21925
+ self._inner_dict['id'] = value
21926
+
21927
+
21413
21928
  class DataHubPersonaKeyClass(_Aspect):
21414
21929
  """Key for a persona type"""
21415
21930
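DataHubMetricCubeKey keys a new internal entity for DataHub's own metrics. A minimal sketch with a placeholder cube id:

    from acryl_datahub_cloud.metadata.schema_classes import DataHubMetricCubeKeyClass

    # Per ASPECT_INFO above, the entity also carries dataHubMetricCubeDefinition
    # and dataHubMetricCubeEvent aspects.
    cube_key = DataHubMetricCubeKeyClass(id="weekly-active-users")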
 
@@ -21660,7 +22175,7 @@ class DataJobKeyClass(_Aspect):
21660
22175
 
21661
22176
 
21662
22177
  ASPECT_NAME = 'dataJobKey'
21663
- ASPECT_INFO = {'keyForEntity': 'dataJob', 'entityCategory': '_unset_', 'entityAspects': ['datahubIngestionRunSummary', 'datahubIngestionCheckpoint', 'domains', 'deprecation', 'versionInfo', 'proposals', 'dataJobInfo', 'dataJobInputOutput', 'editableDataJobProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'forms', 'anomaliesSummary', 'subTypes', 'incidentsSummary', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation', 'dataTransformLogic']}
22178
+ ASPECT_INFO = {'keyForEntity': 'dataJob', 'entityCategory': '_unset_', 'entityAspects': ['datahubIngestionRunSummary', 'datahubIngestionCheckpoint', 'domains', 'deprecation', 'versionInfo', 'dataJobInfo', 'dataJobInputOutput', 'editableDataJobProperties', 'ownership', 'status', 'globalTags', 'browsePaths', 'glossaryTerms', 'institutionalMemory', 'dataPlatformInstance', 'container', 'browsePathsV2', 'structuredProperties', 'forms', 'subTypes', 'incidentsSummary', 'testResults', 'dataTransformLogic', 'proposals', 'anomaliesSummary', 'share', 'origin', 'lineageFeatures', 'documentation']}
21664
22179
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataJobKey")
21665
22180
 
21666
22181
  def __init__(self,
@@ -21702,7 +22217,7 @@ class DataPlatformInstanceKeyClass(_Aspect):
21702
22217
 
21703
22218
 
21704
22219
  ASPECT_NAME = 'dataPlatformInstanceKey'
21705
- ASPECT_INFO = {'keyForEntity': 'dataPlatformInstance', 'entityCategory': 'internal', 'entityAspects': ['dataPlatformInstanceProperties', 'ownership', 'globalTags', 'institutionalMemory', 'deprecation', 'status']}
22220
+ ASPECT_INFO = {'keyForEntity': 'dataPlatformInstance', 'entityCategory': 'internal', 'entityAspects': ['dataPlatformInstanceProperties', 'ownership', 'globalTags', 'institutionalMemory', 'deprecation', 'status', 'icebergWarehouseInfo']}
21706
22221
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataPlatformInstanceKey")
21707
22222
 
21708
22223
  def __init__(self,
@@ -21858,7 +22373,7 @@ class DatasetKeyClass(_Aspect):
21858
22373
 
21859
22374
 
21860
22375
  ASPECT_NAME = 'datasetKey'
21861
- ASPECT_INFO = {'keyForEntity': 'dataset', 'entityCategory': 'core', 'entityAspects': ['viewProperties', 'subTypes', 'datasetProfile', 'datasetUsageStatistics', 'operation', 'domains', 'proposals', 'schemaProposals', 'schemaMetadata', 'status', 'container', 'deprecation', 'usageFeatures', 'storageFeatures', 'lineageFeatures', 'testResults', 'siblings', 'embed', 'incidentsSummary', 'inferredNeighbors', 'inferredMetadata', 'schemaFieldsInferredMetadata', 'schemaFieldsInferredNeighbors', 'assertionsSummary', 'datasetProperties', 'editableDatasetProperties', 'datasetDeprecation', 'datasetUpstreamLineage', 'upstreamLineage', 'institutionalMemory', 'ownership', 'editableSchemaMetadata', 'globalTags', 'glossaryTerms', 'browsePaths', 'dataPlatformInstance', 'browsePathsV2', 'anomaliesSummary', 'access', 'structuredProperties', 'forms', 'partitionsSummary', 'share', 'origin', 'documentation', 'entityInferenceMetadata', 'versionProperties'], 'entityDoc': 'Datasets represent logical or physical data assets stored or represented in various data platforms. Tables, Views, Streams are all instances of datasets.'}
22376
+ ASPECT_INFO = {'keyForEntity': 'dataset', 'entityCategory': 'core', 'entityAspects': ['viewProperties', 'subTypes', 'datasetProfile', 'datasetUsageStatistics', 'operation', 'domains', 'schemaMetadata', 'status', 'container', 'deprecation', 'testResults', 'siblings', 'embed', 'incidentsSummary', 'datasetProperties', 'editableDatasetProperties', 'datasetDeprecation', 'datasetUpstreamLineage', 'upstreamLineage', 'institutionalMemory', 'ownership', 'editableSchemaMetadata', 'globalTags', 'glossaryTerms', 'browsePaths', 'dataPlatformInstance', 'browsePathsV2', 'access', 'structuredProperties', 'forms', 'partitionsSummary', 'versionProperties', 'icebergCatalogInfo', 'inferredNeighbors', 'inferredMetadata', 'schemaFieldsInferredMetadata', 'schemaFieldsInferredNeighbors', 'assertionsSummary', 'usageFeatures', 'storageFeatures', 'lineageFeatures', 'proposals', 'schemaProposals', 'anomaliesSummary', 'share', 'origin', 'documentation', 'entityInferenceMetadata'], 'entityDoc': 'Datasets represent logical or physical data assets stored or represented in various data platforms. Tables, Views, Streams are all instances of datasets.'}
21862
22377
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DatasetKey")
21863
22378
 
21864
22379
  def __init__(self,
@@ -22235,7 +22750,7 @@ class MLFeatureKeyClass(_Aspect):
22235
22750
 
22236
22751
 
22237
22752
  ASPECT_NAME = 'mlFeatureKey'
22238
- ASPECT_INFO = {'keyForEntity': 'mlFeature', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureProperties', 'domains', 'proposals', 'mlFeatureProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
22753
+ ASPECT_INFO = {'keyForEntity': 'mlFeature', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureProperties', 'domains', 'mlFeatureProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
22239
22754
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLFeatureKey")
22240
22755
 
22241
22756
  def __init__(self,
@@ -22277,7 +22792,7 @@ class MLFeatureTableKeyClass(_Aspect):
22277
22792
 
22278
22793
 
22279
22794
  ASPECT_NAME = 'mlFeatureTableKey'
22280
- ASPECT_INFO = {'keyForEntity': 'mlFeatureTable', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureTableProperties', 'domains', 'proposals', 'mlFeatureTableProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation']}
+ ASPECT_INFO = {'keyForEntity': 'mlFeatureTable', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureTableProperties', 'domains', 'mlFeatureTableProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLFeatureTableKey")
 
  def __init__(self,
@@ -22374,7 +22889,7 @@ class MLModelGroupKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'mlModelGroupKey'
- ASPECT_INFO = {'keyForEntity': 'mlModelGroup', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelGroupProperties', 'domains', 'proposals', 'mlModelGroupProperties', 'ownership', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation']}
+ ASPECT_INFO = {'keyForEntity': 'mlModelGroup', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelGroupProperties', 'domains', 'mlModelGroupProperties', 'ownership', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLModelGroupKey")
 
  def __init__(self,
@@ -22429,7 +22944,7 @@ class MLModelKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'mlModelKey'
- ASPECT_INFO = {'keyForEntity': 'mlModel', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelProperties', 'domains', 'proposals', 'ownership', 'mlModelProperties', 'intendedUse', 'mlModelFactorPrompts', 'mlModelMetrics', 'mlModelEvaluationData', 'mlModelTrainingData', 'mlModelQuantitativeAnalyses', 'mlModelEthicalConsiderations', 'mlModelCaveatsAndRecommendations', 'institutionalMemory', 'sourceCode', 'status', 'cost', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary', 'versionProperties']}
+ ASPECT_INFO = {'keyForEntity': 'mlModel', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelProperties', 'domains', 'ownership', 'mlModelProperties', 'intendedUse', 'mlModelFactorPrompts', 'mlModelMetrics', 'mlModelEvaluationData', 'mlModelTrainingData', 'mlModelQuantitativeAnalyses', 'mlModelEthicalConsiderations', 'mlModelCaveatsAndRecommendations', 'institutionalMemory', 'sourceCode', 'status', 'cost', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'versionProperties', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLModelKey")
 
  def __init__(self,
@@ -22484,7 +22999,7 @@ class MLPrimaryKeyKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'mlPrimaryKeyKey'
- ASPECT_INFO = {'keyForEntity': 'mlPrimaryKey', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlPrimaryKeyProperties', 'domains', 'proposals', 'mlPrimaryKeyProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'globalTags', 'dataPlatformInstance', 'structuredProperties', 'forms', 'testResults', 'share', 'origin', 'lineageFeatures']}
+ ASPECT_INFO = {'keyForEntity': 'mlPrimaryKey', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlPrimaryKeyProperties', 'domains', 'mlPrimaryKeyProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'globalTags', 'dataPlatformInstance', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLPrimaryKeyKey")
 
  def __init__(self,
@@ -22568,7 +23083,7 @@ class NotebookKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'notebookKey'
- ASPECT_INFO = {'keyForEntity': 'notebook', 'entityCategory': '_unset_', 'entityAspects': ['notebookInfo', 'notebookContent', 'editableNotebookProperties', 'ownership', 'status', 'globalTags', 'glossaryTerms', 'browsePaths', 'institutionalMemory', 'domains', 'subTypes', 'dataPlatformInstance', 'proposals', 'browsePathsV2', 'testResults', 'share', 'origin', 'documentation'], 'entityDoc': 'Notebook represents a combination of query, text, chart and etc. This is in BETA version'}
+ ASPECT_INFO = {'keyForEntity': 'notebook', 'entityCategory': '_unset_', 'entityAspects': ['notebookInfo', 'notebookContent', 'editableNotebookProperties', 'ownership', 'status', 'globalTags', 'glossaryTerms', 'browsePaths', 'institutionalMemory', 'domains', 'subTypes', 'dataPlatformInstance', 'browsePathsV2', 'testResults', 'proposals', 'share', 'origin', 'documentation'], 'entityDoc': 'Notebook represents a combination of query, text, chart and etc. This is in BETA version'}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.NotebookKey")
 
  def __init__(self,
@@ -22735,6 +23250,36 @@ class RecommendationModuleKeyClass(_Aspect):
  self._inner_dict['identifier'] = value
 
 
+ class RemoteExecutorGlobalConfigKeyClass(_Aspect):
+ """Key for the *Singleton* DataHub Remote Executor Global Config"""
+
+
+ ASPECT_NAME = 'dataHubRemoteExecutorGlobalConfigKey'
+ ASPECT_INFO = {'keyForEntity': 'dataHubRemoteExecutorGlobalConfig', 'entityCategory': 'internal', 'entityAspects': ['dataHubRemoteExecutorPoolGlobalConfig']}
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.RemoteExecutorGlobalConfigKey")
+
+ def __init__(self,
+ id: str,
+ ):
+ super().__init__()
+
+ self.id = id
+
+ def _restore_defaults(self) -> None:
+ self.id = str()
+
+
+ @property
+ def id(self) -> str:
+ """The unique identifier for the remote executor global config
+ NOTE: since this a singleton, there should be a hardcoded key in the AcrylConstants file"""
+ return self._inner_dict.get('id') # type: ignore
+
+ @id.setter
+ def id(self, value: str) -> None:
+ self._inner_dict['id'] = value
+
+
  class RemoteExecutorKeyClass(_Aspect):
  """Key for an DataHub Remote Executor"""
 
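For orientation, the new singleton key aspect added above can be constructed like any other generated key class. A minimal sketch, assuming the wheel is installed; the id value "global" is only a placeholder, since the hardcoded singleton key is defined in the AcrylConstants file and does not appear in this diff.

# Sketch only: instantiating the new singleton global-config key aspect.
# "global" is a placeholder id; the real singleton key lives in AcrylConstants.
from acryl_datahub_cloud.metadata.schema_classes import RemoteExecutorGlobalConfigKeyClass

key = RemoteExecutorGlobalConfigKeyClass(id="global")
print(key.ASPECT_NAME)  # dataHubRemoteExecutorGlobalConfigKey
print(key.id)           # global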
@@ -22773,24 +23318,24 @@ class RemoteExecutorPoolKeyClass(_Aspect):
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.RemoteExecutorPoolKey")
 
  def __init__(self,
- name: str,
+ id: str,
  ):
  super().__init__()
 
- self.name = name
+ self.id = id
 
  def _restore_defaults(self) -> None:
- self.name = str()
+ self.id = str()
 
 
  @property
- def name(self) -> str:
+ def id(self) -> str:
  """The unique identifier for the remote executor pool"""
- return self._inner_dict.get('name') # type: ignore
+ return self._inner_dict.get('id') # type: ignore
 
- @name.setter
- def name(self, value: str) -> None:
- self._inner_dict['name'] = value
+ @id.setter
+ def id(self, value: str) -> None:
+ self._inner_dict['id'] = value
 
 
  class RoleKeyClass(_Aspect):
@@ -22827,7 +23372,7 @@ class SchemaFieldKeyClass(_Aspect):
 
 
  ASPECT_NAME = 'schemaFieldKey'
- ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults', 'schemaFieldProfile', 'lineageFeatures', 'deprecation']}
+ ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults', 'deprecation', 'schemaFieldProfile', 'lineageFeatures']}
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.SchemaFieldKey")
 
  def __init__(self,
@@ -24859,6 +25404,437 @@ class TagSnapshotClass(DictWrapper):
24859
25404
  self._inner_dict['aspects'] = value
24860
25405
 
24861
25406
 
25407
+ class DataHubMetricCubeDefinitionClass(_Aspect):
25408
+ """The structure of an individual metric cube in DataHub."""
25409
+
25410
+
25411
+ ASPECT_NAME = 'dataHubMetricCubeDefinition'
25412
+ ASPECT_INFO = {}
25413
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeDefinition")
25414
+
25415
+ def __init__(self,
25416
+ name: str,
25417
+ origin: "DataHubMetricCubeOriginClass",
25418
+ type: Optional[str]=None,
25419
+ description: Union[None, str]=None,
25420
+ entity: Union[None, str]=None,
25421
+ measures: Union[None, "DataHubMetricCubeMeasuresClass"]=None,
25422
+ dimensions: Union[None, "DataHubMetricCubeDimensionsClass"]=None,
25423
+ ):
25424
+ super().__init__()
25425
+
25426
+ self.name = name
25427
+ if type is None:
25428
+ # default: 'custom'
25429
+ self.type = self.RECORD_SCHEMA.fields_dict["type"].default
25430
+ else:
25431
+ self.type = type
25432
+ self.description = description
25433
+ self.entity = entity
25434
+ self.origin = origin
25435
+ self.measures = measures
25436
+ self.dimensions = dimensions
25437
+
25438
+ def _restore_defaults(self) -> None:
25439
+ self.name = str()
25440
+ self.type = self.RECORD_SCHEMA.fields_dict["type"].default
25441
+ self.description = self.RECORD_SCHEMA.fields_dict["description"].default
25442
+ self.entity = self.RECORD_SCHEMA.fields_dict["entity"].default
25443
+ self.origin = DataHubMetricCubeOriginClass._construct_with_defaults()
25444
+ self.measures = self.RECORD_SCHEMA.fields_dict["measures"].default
25445
+ self.dimensions = self.RECORD_SCHEMA.fields_dict["dimensions"].default
25446
+
25447
+
25448
+ @property
25449
+ def name(self) -> str:
25450
+ """ Display name of the metric cube"""
25451
+ return self._inner_dict.get('name') # type: ignore
25452
+
25453
+ @name.setter
25454
+ def name(self, value: str) -> None:
25455
+ self._inner_dict['name'] = value
25456
+
25457
+
25458
+ @property
25459
+ def type(self) -> str:
25460
+ """A type or category for the metric cube. This is used to categorize the metric cube & for filtering.
25461
+
25462
+ This may be used to group similar types of metrics for a given entity, e.g. 'row_count', 'error_count', etc.
25463
+ that originated in different places."""
25464
+ return self._inner_dict.get('type') # type: ignore
25465
+
25466
+ @type.setter
25467
+ def type(self, value: str) -> None:
25468
+ self._inner_dict['type'] = value
25469
+
25470
+
25471
+ @property
25472
+ def description(self) -> Union[None, str]:
25473
+ """ Optional description for the metric"""
25474
+ return self._inner_dict.get('description') # type: ignore
25475
+
25476
+ @description.setter
25477
+ def description(self, value: Union[None, str]) -> None:
25478
+ self._inner_dict['description'] = value
25479
+
25480
+
25481
+ @property
25482
+ def entity(self) -> Union[None, str]:
25483
+ """ An optional URN for the entity that this metric cube is associated with."""
25484
+ return self._inner_dict.get('entity') # type: ignore
25485
+
25486
+ @entity.setter
25487
+ def entity(self, value: Union[None, str]) -> None:
25488
+ self._inner_dict['entity'] = value
25489
+
25490
+
25491
+ @property
25492
+ def origin(self) -> "DataHubMetricCubeOriginClass":
25493
+ """The origin of the metric cube."""
25494
+ return self._inner_dict.get('origin') # type: ignore
25495
+
25496
+ @origin.setter
25497
+ def origin(self, value: "DataHubMetricCubeOriginClass") -> None:
25498
+ self._inner_dict['origin'] = value
25499
+
25500
+
25501
+ @property
25502
+ def measures(self) -> Union[None, "DataHubMetricCubeMeasuresClass"]:
25503
+ """ Optional - The measures of the cube for display purposes."""
25504
+ return self._inner_dict.get('measures') # type: ignore
25505
+
25506
+ @measures.setter
25507
+ def measures(self, value: Union[None, "DataHubMetricCubeMeasuresClass"]) -> None:
25508
+ self._inner_dict['measures'] = value
25509
+
25510
+
25511
+ @property
25512
+ def dimensions(self) -> Union[None, "DataHubMetricCubeDimensionsClass"]:
25513
+ """Optional - The dimensions of the cube for display purposes."""
25514
+ return self._inner_dict.get('dimensions') # type: ignore
25515
+
25516
+ @dimensions.setter
25517
+ def dimensions(self, value: Union[None, "DataHubMetricCubeDimensionsClass"]) -> None:
25518
+ self._inner_dict['dimensions'] = value
25519
+
25520
+
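The definition class above ties together the origin, measures, and dimensions records that follow. A minimal sketch of assembling one, assuming the wheel is installed; the cube name, type, description, dataset URN, and measure/dimension names are illustrative placeholders rather than values from this package.

# Sketch only: assembling a metric cube definition from the new classes.
# All names and the entity URN below are made up for illustration.
from acryl_datahub_cloud.metadata.schema_classes import (
    DataHubMetricCubeDefinitionClass,
    DataHubMetricCubeDimensionClass,
    DataHubMetricCubeDimensionsClass,
    DataHubMetricCubeMeasureClass,
    DataHubMetricCubeMeasuresClass,
    DataHubMetricCubeOriginClass,
    DataHubMetricSourceTypeClass,
)

definition = DataHubMetricCubeDefinitionClass(
    name="Daily row count",
    type="row_count",  # defaults to 'custom' when omitted
    description="Rows observed per day, split by region",
    entity="urn:li:dataset:(urn:li:dataPlatform:hive,example.table,PROD)",
    origin=DataHubMetricCubeOriginClass(type=DataHubMetricSourceTypeClass.MANUAL),
    measures=DataHubMetricCubeMeasuresClass(
        measure=DataHubMetricCubeMeasureClass(name="row_count")
    ),
    dimensions=DataHubMetricCubeDimensionsClass(
        dim1=DataHubMetricCubeDimensionClass(name="region")
    ),
)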
25521
+ class DataHubMetricCubeDimensionClass(DictWrapper):
25522
+ """The definition of a metric cube dimension."""
25523
+
25524
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimension")
25525
+ def __init__(self,
25526
+ name: str,
25527
+ ):
25528
+ super().__init__()
25529
+
25530
+ self.name = name
25531
+
25532
+ def _restore_defaults(self) -> None:
25533
+ self.name = str()
25534
+
25535
+
25536
+ @property
25537
+ def name(self) -> str:
25538
+ """ The name of the dimension"""
25539
+ return self._inner_dict.get('name') # type: ignore
25540
+
25541
+ @name.setter
25542
+ def name(self, value: str) -> None:
25543
+ self._inner_dict['name'] = value
25544
+
25545
+
25546
+ class DataHubMetricCubeDimensionsClass(DictWrapper):
25547
+ """The dimensions of the cube. This is what you filter and group by.
25548
+ This is a record to allow for future expansion of the dimensions."""
25549
+
25550
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimensions")
25551
+ def __init__(self,
25552
+ dim1: Union[None, "DataHubMetricCubeDimensionClass"]=None,
25553
+ dim2: Union[None, "DataHubMetricCubeDimensionClass"]=None,
25554
+ dim3: Union[None, "DataHubMetricCubeDimensionClass"]=None,
25555
+ ):
25556
+ super().__init__()
25557
+
25558
+ self.dim1 = dim1
25559
+ self.dim2 = dim2
25560
+ self.dim3 = dim3
25561
+
25562
+ def _restore_defaults(self) -> None:
25563
+ self.dim1 = self.RECORD_SCHEMA.fields_dict["dim1"].default
25564
+ self.dim2 = self.RECORD_SCHEMA.fields_dict["dim2"].default
25565
+ self.dim3 = self.RECORD_SCHEMA.fields_dict["dim3"].default
25566
+
25567
+
25568
+ @property
25569
+ def dim1(self) -> Union[None, "DataHubMetricCubeDimensionClass"]:
25570
+ """ The first measure being tracked in the cube."""
25571
+ return self._inner_dict.get('dim1') # type: ignore
25572
+
25573
+ @dim1.setter
25574
+ def dim1(self, value: Union[None, "DataHubMetricCubeDimensionClass"]) -> None:
25575
+ self._inner_dict['dim1'] = value
25576
+
25577
+
25578
+ @property
25579
+ def dim2(self) -> Union[None, "DataHubMetricCubeDimensionClass"]:
25580
+ """Optional: A second measure being tracked in the cube."""
25581
+ return self._inner_dict.get('dim2') # type: ignore
25582
+
25583
+ @dim2.setter
25584
+ def dim2(self, value: Union[None, "DataHubMetricCubeDimensionClass"]) -> None:
25585
+ self._inner_dict['dim2'] = value
25586
+
25587
+
25588
+ @property
25589
+ def dim3(self) -> Union[None, "DataHubMetricCubeDimensionClass"]:
25590
+ """Optional: A third measure being tracked in the cube."""
25591
+ return self._inner_dict.get('dim3') # type: ignore
25592
+
25593
+ @dim3.setter
25594
+ def dim3(self, value: Union[None, "DataHubMetricCubeDimensionClass"]) -> None:
25595
+ self._inner_dict['dim3'] = value
25596
+
25597
+
25598
+ class DataHubMetricCubeEventClass(_Aspect):
25599
+ """A timeseries measure event, e.g. a single observation."""
25600
+
25601
+
25602
+ ASPECT_NAME = 'dataHubMetricCubeEvent'
25603
+ ASPECT_TYPE = 'timeseries'
25604
+ ASPECT_INFO = {}
25605
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeEvent")
25606
+
25607
+ def __init__(self,
25608
+ reportedTimeMillis: int,
25609
+ measure: float,
25610
+ timestampMillis: int,
25611
+ dim1: Union[None, List[str]]=None,
25612
+ dim2: Union[None, List[str]]=None,
25613
+ dim3: Union[None, List[str]]=None,
25614
+ eventGranularity: Union[None, "TimeWindowSizeClass"]=None,
25615
+ partitionSpec: Optional[Union["PartitionSpecClass", None]]=None,
25616
+ messageId: Union[None, str]=None,
25617
+ ):
25618
+ super().__init__()
25619
+
25620
+ self.reportedTimeMillis = reportedTimeMillis
25621
+ self.measure = measure
25622
+ self.dim1 = dim1
25623
+ self.dim2 = dim2
25624
+ self.dim3 = dim3
25625
+ self.timestampMillis = timestampMillis
25626
+ self.eventGranularity = eventGranularity
25627
+ if partitionSpec is None:
25628
+ # default: {'partition': 'FULL_TABLE_SNAPSHOT', 'type': 'FULL_TABLE', 'timePartition': None}
25629
+ self.partitionSpec = _json_converter.from_json_object(self.RECORD_SCHEMA.fields_dict["partitionSpec"].default, writers_schema=self.RECORD_SCHEMA.fields_dict["partitionSpec"].type)
25630
+ else:
25631
+ self.partitionSpec = partitionSpec
25632
+ self.messageId = messageId
25633
+
25634
+ def _restore_defaults(self) -> None:
25635
+ self.reportedTimeMillis = int()
25636
+ self.measure = float()
25637
+ self.dim1 = self.RECORD_SCHEMA.fields_dict["dim1"].default
25638
+ self.dim2 = self.RECORD_SCHEMA.fields_dict["dim2"].default
25639
+ self.dim3 = self.RECORD_SCHEMA.fields_dict["dim3"].default
25640
+ self.timestampMillis = int()
25641
+ self.eventGranularity = self.RECORD_SCHEMA.fields_dict["eventGranularity"].default
25642
+ self.partitionSpec = _json_converter.from_json_object(self.RECORD_SCHEMA.fields_dict["partitionSpec"].default, writers_schema=self.RECORD_SCHEMA.fields_dict["partitionSpec"].type)
25643
+ self.messageId = self.RECORD_SCHEMA.fields_dict["messageId"].default
25644
+
25645
+
25646
+ @property
25647
+ def reportedTimeMillis(self) -> int:
25648
+ """The event or bucket reported time field as epoch at UTC in milli seconds.
25649
+ This must be provided in order to pass validation."""
25650
+ return self._inner_dict.get('reportedTimeMillis') # type: ignore
25651
+
25652
+ @reportedTimeMillis.setter
25653
+ def reportedTimeMillis(self, value: int) -> None:
25654
+ self._inner_dict['reportedTimeMillis'] = value
25655
+
25656
+
25657
+ @property
25658
+ def measure(self) -> float:
25659
+ """The first measure value - Typically this is the primary metric."""
25660
+ return self._inner_dict.get('measure') # type: ignore
25661
+
25662
+ @measure.setter
25663
+ def measure(self, value: float) -> None:
25664
+ self._inner_dict['measure'] = value
25665
+
25666
+
25667
+ @property
25668
+ def dim1(self) -> Union[None, List[str]]:
25669
+ """The first dimension value(s). Array type to support multi-dimensionality."""
25670
+ return self._inner_dict.get('dim1') # type: ignore
25671
+
25672
+ @dim1.setter
25673
+ def dim1(self, value: Union[None, List[str]]) -> None:
25674
+ self._inner_dict['dim1'] = value
25675
+
25676
+
25677
+ @property
25678
+ def dim2(self) -> Union[None, List[str]]:
25679
+ """The second dimension value(s). Array type to support multi-dimensionality."""
25680
+ return self._inner_dict.get('dim2') # type: ignore
25681
+
25682
+ @dim2.setter
25683
+ def dim2(self, value: Union[None, List[str]]) -> None:
25684
+ self._inner_dict['dim2'] = value
25685
+
25686
+
25687
+ @property
25688
+ def dim3(self) -> Union[None, List[str]]:
25689
+ """The third dimension value(s). Array type to support multi-dimensionality."""
25690
+ return self._inner_dict.get('dim3') # type: ignore
25691
+
25692
+ @dim3.setter
25693
+ def dim3(self, value: Union[None, List[str]]) -> None:
25694
+ self._inner_dict['dim3'] = value
25695
+
25696
+
25697
+ @property
25698
+ def timestampMillis(self) -> int:
25699
+ """The event timestamp field as epoch at UTC in milli seconds."""
25700
+ return self._inner_dict.get('timestampMillis') # type: ignore
25701
+
25702
+ @timestampMillis.setter
25703
+ def timestampMillis(self, value: int) -> None:
25704
+ self._inner_dict['timestampMillis'] = value
25705
+
25706
+
25707
+ @property
25708
+ def eventGranularity(self) -> Union[None, "TimeWindowSizeClass"]:
25709
+ """Granularity of the event if applicable"""
25710
+ return self._inner_dict.get('eventGranularity') # type: ignore
25711
+
25712
+ @eventGranularity.setter
25713
+ def eventGranularity(self, value: Union[None, "TimeWindowSizeClass"]) -> None:
25714
+ self._inner_dict['eventGranularity'] = value
25715
+
25716
+
25717
+ @property
25718
+ def partitionSpec(self) -> Union["PartitionSpecClass", None]:
25719
+ """The optional partition specification."""
25720
+ return self._inner_dict.get('partitionSpec') # type: ignore
25721
+
25722
+ @partitionSpec.setter
25723
+ def partitionSpec(self, value: Union["PartitionSpecClass", None]) -> None:
25724
+ self._inner_dict['partitionSpec'] = value
25725
+
25726
+
25727
+ @property
25728
+ def messageId(self) -> Union[None, str]:
25729
+ """The optional messageId, if provided serves as a custom user-defined unique identifier for an aspect value."""
25730
+ return self._inner_dict.get('messageId') # type: ignore
25731
+
25732
+ @messageId.setter
25733
+ def messageId(self, value: Union[None, str]) -> None:
25734
+ self._inner_dict['messageId'] = value
25735
+
25736
+
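Each observation for a cube is recorded as one of these timeseries events. A minimal sketch, assuming the wheel is installed; the timestamp, measure, and dimension values are illustrative.

# Sketch only: one timeseries observation for a metric cube.
import time

from acryl_datahub_cloud.metadata.schema_classes import DataHubMetricCubeEventClass

now_ms = int(time.time() * 1000)
event = DataHubMetricCubeEventClass(
    timestampMillis=now_ms,     # when the observation applies
    reportedTimeMillis=now_ms,  # when it was reported; required for validation
    measure=12345.0,            # the primary metric value
    dim1=["us-east-1"],         # dimension values are arrays to allow multiple values
)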
25737
+ class DataHubMetricCubeMeasureClass(DictWrapper):
25738
+ """The definition of a metric cube measure."""
25739
+
25740
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasure")
25741
+ def __init__(self,
25742
+ name: str,
25743
+ ):
25744
+ super().__init__()
25745
+
25746
+ self.name = name
25747
+
25748
+ def _restore_defaults(self) -> None:
25749
+ self.name = str()
25750
+
25751
+
25752
+ @property
25753
+ def name(self) -> str:
25754
+ """ The name of the measure"""
25755
+ return self._inner_dict.get('name') # type: ignore
25756
+
25757
+ @name.setter
25758
+ def name(self, value: str) -> None:
25759
+ self._inner_dict['name'] = value
25760
+
25761
+
25762
+ class DataHubMetricCubeMeasuresClass(DictWrapper):
25763
+ """The definition of the measures of a metric cube.
25764
+ A measure is a metric that is being tracked in the cube."""
25765
+
25766
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasures")
25767
+ def __init__(self,
25768
+ measure: "DataHubMetricCubeMeasureClass",
25769
+ ):
25770
+ super().__init__()
25771
+
25772
+ self.measure = measure
25773
+
25774
+ def _restore_defaults(self) -> None:
25775
+ self.measure = DataHubMetricCubeMeasureClass._construct_with_defaults()
25776
+
25777
+
25778
+ @property
25779
+ def measure(self) -> "DataHubMetricCubeMeasureClass":
25780
+ """ The first measure being tracked in the cube."""
25781
+ return self._inner_dict.get('measure') # type: ignore
25782
+
25783
+ @measure.setter
25784
+ def measure(self, value: "DataHubMetricCubeMeasureClass") -> None:
25785
+ self._inner_dict['measure'] = value
25786
+
25787
+
25788
+ class DataHubMetricCubeOriginClass(DictWrapper):
25789
+ """Information about the origin of the metric cube"""
25790
+
25791
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metric.DataHubMetricCubeOrigin")
25792
+ def __init__(self,
25793
+ type: Union[str, "DataHubMetricSourceTypeClass"],
25794
+ originUrn: Union[None, str]=None,
25795
+ ):
25796
+ super().__init__()
25797
+
25798
+ self.type = type
25799
+ self.originUrn = originUrn
25800
+
25801
+ def _restore_defaults(self) -> None:
25802
+ self.type = DataHubMetricSourceTypeClass.MANUAL
25803
+ self.originUrn = self.RECORD_SCHEMA.fields_dict["originUrn"].default
25804
+
25805
+
25806
+ @property
25807
+ def type(self) -> Union[str, "DataHubMetricSourceTypeClass"]:
25808
+ """Message associated with the incident"""
25809
+ return self._inner_dict.get('type') # type: ignore
25810
+
25811
+ @type.setter
25812
+ def type(self, value: Union[str, "DataHubMetricSourceTypeClass"]) -> None:
25813
+ self._inner_dict['type'] = value
25814
+
25815
+
25816
+ @property
25817
+ def originUrn(self) -> Union[None, str]:
25818
+ """Reference to the source that created the metric.
25819
+ In the case of assertion monitor, this is the URN of the assertion monitor."""
25820
+ return self._inner_dict.get('originUrn') # type: ignore
25821
+
25822
+ @originUrn.setter
25823
+ def originUrn(self, value: Union[None, str]) -> None:
25824
+ self._inner_dict['originUrn'] = value
25825
+
25826
+
25827
+ class DataHubMetricSourceTypeClass(object):
25828
+ # No docs available.
25829
+
25830
+ MANUAL = "MANUAL"
25831
+ """Manually created metric, via UI or API."""
25832
+
25833
+ ASSERTION_MONITOR = "ASSERTION_MONITOR"
25834
+ """Assertion monitor created the metric."""
25835
+
25836
+
25837
+
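The origin record above distinguishes manually created cubes from those produced by an assertion monitor. A minimal sketch of the monitor-produced case; the originUrn value is a made-up placeholder, since the exact monitor URN format is not shown in this diff.

# Sketch only: an origin recording that an assertion monitor produced the cube.
# The originUrn below is a placeholder, not a real URN from this package.
from acryl_datahub_cloud.metadata.schema_classes import (
    DataHubMetricCubeOriginClass,
    DataHubMetricSourceTypeClass,
)

origin = DataHubMetricCubeOriginClass(
    type=DataHubMetricSourceTypeClass.ASSERTION_MONITOR,
    originUrn="urn:li:assertion:example-monitor",
)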
24862
25838
  class BaseDataClass(DictWrapper):
24863
25839
  """BaseData record"""
24864
25840
 
@@ -27339,7 +28315,7 @@ class MonitorInfoClass(_Aspect):
 
  @property
  def executorId(self) -> Union[None, str]:
- """Advanced: The executor ID of the remote monitor service, if any."""
+ """Advanced: The executor pool id of the remote monitor service, if any."""
  return self._inner_dict.get('executorId') # type: ignore
 
  @executorId.setter
@@ -35195,7 +36171,10 @@ __SCHEMA_TYPES = {
35195
36171
  'com.linkedin.pegasus2avro.actionrequest.DataContractProposal': DataContractProposalClass,
35196
36172
  'com.linkedin.pegasus2avro.actionrequest.DataContractProposalOperationType': DataContractProposalOperationTypeClass,
35197
36173
  'com.linkedin.pegasus2avro.actionrequest.DescriptionProposal': DescriptionProposalClass,
36174
+ 'com.linkedin.pegasus2avro.actionrequest.DomainProposal': DomainProposalClass,
35198
36175
  'com.linkedin.pegasus2avro.actionrequest.GlossaryTermProposal': GlossaryTermProposalClass,
36176
+ 'com.linkedin.pegasus2avro.actionrequest.OwnerProposal': OwnerProposalClass,
36177
+ 'com.linkedin.pegasus2avro.actionrequest.StructuredPropertyProposal': StructuredPropertyProposalClass,
35199
36178
  'com.linkedin.pegasus2avro.actionrequest.TagProposal': TagProposalClass,
35200
36179
  'com.linkedin.pegasus2avro.ai.AiInferenceMetadata': AiInferenceMetadataClass,
35201
36180
  'com.linkedin.pegasus2avro.ai.EntityInferenceMetadata': EntityInferenceMetadataClass,
@@ -35407,6 +36386,7 @@ __SCHEMA_TYPES = {
35407
36386
  'com.linkedin.pegasus2avro.dataplatform.PlatformType': PlatformTypeClass,
35408
36387
  'com.linkedin.pegasus2avro.dataplatform.slack.SlackUserInfo': SlackUserInfoClass,
35409
36388
  'com.linkedin.pegasus2avro.dataplatforminstance.DataPlatformInstanceProperties': DataPlatformInstancePropertiesClass,
36389
+ 'com.linkedin.pegasus2avro.dataplatforminstance.IcebergWarehouseInfo': IcebergWarehouseInfoClass,
35410
36390
  'com.linkedin.pegasus2avro.dataprocess.DataProcessInfo': DataProcessInfoClass,
35411
36391
  'com.linkedin.pegasus2avro.dataprocess.DataProcessInstanceInput': DataProcessInstanceInputClass,
35412
36392
  'com.linkedin.pegasus2avro.dataprocess.DataProcessInstanceOutput': DataProcessInstanceOutputClass,
@@ -35439,6 +36419,7 @@ __SCHEMA_TYPES = {
35439
36419
  'com.linkedin.pegasus2avro.dataset.FineGrainedLineageDownstreamType': FineGrainedLineageDownstreamTypeClass,
35440
36420
  'com.linkedin.pegasus2avro.dataset.FineGrainedLineageUpstreamType': FineGrainedLineageUpstreamTypeClass,
35441
36421
  'com.linkedin.pegasus2avro.dataset.Histogram': HistogramClass,
36422
+ 'com.linkedin.pegasus2avro.dataset.IcebergCatalogInfo': IcebergCatalogInfoClass,
35442
36423
  'com.linkedin.pegasus2avro.dataset.PartitionSummary': PartitionSummaryClass,
35443
36424
  'com.linkedin.pegasus2avro.dataset.PartitionsSummary': PartitionsSummaryClass,
35444
36425
  'com.linkedin.pegasus2avro.dataset.Quantile': QuantileClass,
@@ -35477,7 +36458,10 @@ __SCHEMA_TYPES = {
35477
36458
  'com.linkedin.pegasus2avro.execution.ExecutionRequestSource': ExecutionRequestSourceClass,
35478
36459
  'com.linkedin.pegasus2avro.execution.StructuredExecutionReport': StructuredExecutionReportClass,
35479
36460
  'com.linkedin.pegasus2avro.executor.RemoteExecutorStatus': RemoteExecutorStatusClass,
36461
+ 'com.linkedin.pegasus2avro.executorglobalconfig.RemoteExecutorPoolGlobalConfig': RemoteExecutorPoolGlobalConfigClass,
35480
36462
  'com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolInfo': RemoteExecutorPoolInfoClass,
36463
+ 'com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolState': RemoteExecutorPoolStateClass,
36464
+ 'com.linkedin.pegasus2avro.executorpool.RemoteExecutorPoolStatus': RemoteExecutorPoolStatusClass,
35481
36465
  'com.linkedin.pegasus2avro.form.DomainParams': DomainParamsClass,
35482
36466
  'com.linkedin.pegasus2avro.form.DynamicFormAssignment': DynamicFormAssignmentClass,
35483
36467
  'com.linkedin.pegasus2avro.form.FormActorAssignment': FormActorAssignmentClass,
@@ -35557,6 +36541,7 @@ __SCHEMA_TYPES = {
35557
36541
  'com.linkedin.pegasus2avro.metadata.key.DataHubActionKey': DataHubActionKeyClass,
35558
36542
  'com.linkedin.pegasus2avro.metadata.key.DataHubConnectionKey': DataHubConnectionKeyClass,
35559
36543
  'com.linkedin.pegasus2avro.metadata.key.DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
36544
+ 'com.linkedin.pegasus2avro.metadata.key.DataHubMetricCubeKey': DataHubMetricCubeKeyClass,
35560
36545
  'com.linkedin.pegasus2avro.metadata.key.DataHubPersonaKey': DataHubPersonaKeyClass,
35561
36546
  'com.linkedin.pegasus2avro.metadata.key.DataHubPolicyKey': DataHubPolicyKeyClass,
35562
36547
  'com.linkedin.pegasus2avro.metadata.key.DataHubRetentionKey': DataHubRetentionKeyClass,
@@ -35594,6 +36579,7 @@ __SCHEMA_TYPES = {
35594
36579
  'com.linkedin.pegasus2avro.metadata.key.PostKey': PostKeyClass,
35595
36580
  'com.linkedin.pegasus2avro.metadata.key.QueryKey': QueryKeyClass,
35596
36581
  'com.linkedin.pegasus2avro.metadata.key.RecommendationModuleKey': RecommendationModuleKeyClass,
36582
+ 'com.linkedin.pegasus2avro.metadata.key.RemoteExecutorGlobalConfigKey': RemoteExecutorGlobalConfigKeyClass,
35597
36583
  'com.linkedin.pegasus2avro.metadata.key.RemoteExecutorKey': RemoteExecutorKeyClass,
35598
36584
  'com.linkedin.pegasus2avro.metadata.key.RemoteExecutorPoolKey': RemoteExecutorPoolKeyClass,
35599
36585
  'com.linkedin.pegasus2avro.metadata.key.RoleKey': RoleKeyClass,
@@ -35641,6 +36627,14 @@ __SCHEMA_TYPES = {
35641
36627
  'com.linkedin.pegasus2avro.metadata.snapshot.MLPrimaryKeySnapshot': MLPrimaryKeySnapshotClass,
35642
36628
  'com.linkedin.pegasus2avro.metadata.snapshot.SchemaFieldSnapshot': SchemaFieldSnapshotClass,
35643
36629
  'com.linkedin.pegasus2avro.metadata.snapshot.TagSnapshot': TagSnapshotClass,
36630
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeDefinition': DataHubMetricCubeDefinitionClass,
36631
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimension': DataHubMetricCubeDimensionClass,
36632
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeDimensions': DataHubMetricCubeDimensionsClass,
36633
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeEvent': DataHubMetricCubeEventClass,
36634
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasure': DataHubMetricCubeMeasureClass,
36635
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeMeasures': DataHubMetricCubeMeasuresClass,
36636
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricCubeOrigin': DataHubMetricCubeOriginClass,
36637
+ 'com.linkedin.pegasus2avro.metric.DataHubMetricSourceType': DataHubMetricSourceTypeClass,
35644
36638
  'com.linkedin.pegasus2avro.ml.metadata.BaseData': BaseDataClass,
35645
36639
  'com.linkedin.pegasus2avro.ml.metadata.CaveatDetails': CaveatDetailsClass,
35646
36640
  'com.linkedin.pegasus2avro.ml.metadata.CaveatsAndRecommendations': CaveatsAndRecommendationsClass,
@@ -35866,7 +36860,10 @@ __SCHEMA_TYPES = {
35866
36860
  'DataContractProposal': DataContractProposalClass,
35867
36861
  'DataContractProposalOperationType': DataContractProposalOperationTypeClass,
35868
36862
  'DescriptionProposal': DescriptionProposalClass,
36863
+ 'DomainProposal': DomainProposalClass,
35869
36864
  'GlossaryTermProposal': GlossaryTermProposalClass,
36865
+ 'OwnerProposal': OwnerProposalClass,
36866
+ 'StructuredPropertyProposal': StructuredPropertyProposalClass,
35870
36867
  'TagProposal': TagProposalClass,
35871
36868
  'AiInferenceMetadata': AiInferenceMetadataClass,
35872
36869
  'EntityInferenceMetadata': EntityInferenceMetadataClass,
@@ -36078,6 +37075,7 @@ __SCHEMA_TYPES = {
36078
37075
  'PlatformType': PlatformTypeClass,
36079
37076
  'SlackUserInfo': SlackUserInfoClass,
36080
37077
  'DataPlatformInstanceProperties': DataPlatformInstancePropertiesClass,
37078
+ 'IcebergWarehouseInfo': IcebergWarehouseInfoClass,
36081
37079
  'DataProcessInfo': DataProcessInfoClass,
36082
37080
  'DataProcessInstanceInput': DataProcessInstanceInputClass,
36083
37081
  'DataProcessInstanceOutput': DataProcessInstanceOutputClass,
@@ -36110,6 +37108,7 @@ __SCHEMA_TYPES = {
36110
37108
  'FineGrainedLineageDownstreamType': FineGrainedLineageDownstreamTypeClass,
36111
37109
  'FineGrainedLineageUpstreamType': FineGrainedLineageUpstreamTypeClass,
36112
37110
  'Histogram': HistogramClass,
37111
+ 'IcebergCatalogInfo': IcebergCatalogInfoClass,
36113
37112
  'PartitionSummary': PartitionSummaryClass,
36114
37113
  'PartitionsSummary': PartitionsSummaryClass,
36115
37114
  'Quantile': QuantileClass,
@@ -36148,7 +37147,10 @@ __SCHEMA_TYPES = {
36148
37147
  'ExecutionRequestSource': ExecutionRequestSourceClass,
36149
37148
  'StructuredExecutionReport': StructuredExecutionReportClass,
36150
37149
  'RemoteExecutorStatus': RemoteExecutorStatusClass,
37150
+ 'RemoteExecutorPoolGlobalConfig': RemoteExecutorPoolGlobalConfigClass,
36151
37151
  'RemoteExecutorPoolInfo': RemoteExecutorPoolInfoClass,
37152
+ 'RemoteExecutorPoolState': RemoteExecutorPoolStateClass,
37153
+ 'RemoteExecutorPoolStatus': RemoteExecutorPoolStatusClass,
36152
37154
  'DomainParams': DomainParamsClass,
36153
37155
  'DynamicFormAssignment': DynamicFormAssignmentClass,
36154
37156
  'FormActorAssignment': FormActorAssignmentClass,
@@ -36228,6 +37230,7 @@ __SCHEMA_TYPES = {
36228
37230
  'DataHubActionKey': DataHubActionKeyClass,
36229
37231
  'DataHubConnectionKey': DataHubConnectionKeyClass,
36230
37232
  'DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
37233
+ 'DataHubMetricCubeKey': DataHubMetricCubeKeyClass,
36231
37234
  'DataHubPersonaKey': DataHubPersonaKeyClass,
36232
37235
  'DataHubPolicyKey': DataHubPolicyKeyClass,
36233
37236
  'DataHubRetentionKey': DataHubRetentionKeyClass,
@@ -36265,6 +37268,7 @@ __SCHEMA_TYPES = {
36265
37268
  'PostKey': PostKeyClass,
36266
37269
  'QueryKey': QueryKeyClass,
36267
37270
  'RecommendationModuleKey': RecommendationModuleKeyClass,
37271
+ 'RemoteExecutorGlobalConfigKey': RemoteExecutorGlobalConfigKeyClass,
36268
37272
  'RemoteExecutorKey': RemoteExecutorKeyClass,
36269
37273
  'RemoteExecutorPoolKey': RemoteExecutorPoolKeyClass,
36270
37274
  'RoleKey': RoleKeyClass,
@@ -36312,6 +37316,14 @@ __SCHEMA_TYPES = {
36312
37316
  'MLPrimaryKeySnapshot': MLPrimaryKeySnapshotClass,
36313
37317
  'SchemaFieldSnapshot': SchemaFieldSnapshotClass,
36314
37318
  'TagSnapshot': TagSnapshotClass,
37319
+ 'DataHubMetricCubeDefinition': DataHubMetricCubeDefinitionClass,
37320
+ 'DataHubMetricCubeDimension': DataHubMetricCubeDimensionClass,
37321
+ 'DataHubMetricCubeDimensions': DataHubMetricCubeDimensionsClass,
37322
+ 'DataHubMetricCubeEvent': DataHubMetricCubeEventClass,
37323
+ 'DataHubMetricCubeMeasure': DataHubMetricCubeMeasureClass,
37324
+ 'DataHubMetricCubeMeasures': DataHubMetricCubeMeasuresClass,
37325
+ 'DataHubMetricCubeOrigin': DataHubMetricCubeOriginClass,
37326
+ 'DataHubMetricSourceType': DataHubMetricSourceTypeClass,
36315
37327
  'BaseData': BaseDataClass,
36316
37328
  'CaveatDetails': CaveatDetailsClass,
36317
37329
  'CaveatsAndRecommendations': CaveatsAndRecommendationsClass,
@@ -36566,6 +37578,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36566
37578
  EditableContainerPropertiesClass,
36567
37579
  ContainerPropertiesClass,
36568
37580
  ContainerClass,
37581
+ RemoteExecutorPoolGlobalConfigClass,
36569
37582
  DataHubRetentionConfigClass,
36570
37583
  TelemetryClientIdClass,
36571
37584
  DataHubAccessTokenInfoClass,
@@ -36609,6 +37622,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36609
37622
  TestInfoClass,
36610
37623
  BatchTestRunEventClass,
36611
37624
  DataPlatformInstancePropertiesClass,
37625
+ IcebergWarehouseInfoClass,
36612
37626
  EditableERModelRelationshipPropertiesClass,
36613
37627
  ERModelRelationshipPropertiesClass,
36614
37628
  EntityTypeInfoClass,
@@ -36618,6 +37632,8 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36618
37632
  EditableSchemaMetadataClass,
36619
37633
  SchemaProposalsClass,
36620
37634
  SchemaMetadataClass,
37635
+ DataHubMetricCubeEventClass,
37636
+ DataHubMetricCubeDefinitionClass,
36621
37637
  AccessClass,
36622
37638
  AnomaliesSummaryClass,
36623
37639
  ProposalsClass,
@@ -36666,6 +37682,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36666
37682
  EditableDatasetPropertiesClass,
36667
37683
  DatasetProfileClass,
36668
37684
  DatasetDeprecationClass,
37685
+ IcebergCatalogInfoClass,
36669
37686
  DatasetPropertiesClass,
36670
37687
  PartitionsSummaryClass,
36671
37688
  DatasetUpstreamLineageClass,
@@ -36710,6 +37727,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36710
37727
  PostKeyClass,
36711
37728
  MLPrimaryKeyKeyClass,
36712
37729
  DataPlatformInstanceKeyClass,
37730
+ DataHubMetricCubeKeyClass,
36713
37731
  QueryKeyClass,
36714
37732
  DatasetKeyClass,
36715
37733
  ExecutionRequestKeyClass,
@@ -36744,6 +37762,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
36744
37762
  DataHubRetentionKeyClass,
36745
37763
  OwnershipTypeKeyClass,
36746
37764
  ActionRequestKeyClass,
37765
+ RemoteExecutorGlobalConfigKeyClass,
36747
37766
  DataFlowKeyClass,
36748
37767
  GenericEntityKeyClass,
36749
37768
  DataContractKeyClass,
@@ -36835,6 +37854,7 @@ class AspectBag(TypedDict, total=False):
36835
37854
  editableContainerProperties: EditableContainerPropertiesClass
36836
37855
  containerProperties: ContainerPropertiesClass
36837
37856
  container: ContainerClass
37857
+ dataHubRemoteExecutorPoolGlobalConfig: RemoteExecutorPoolGlobalConfigClass
36838
37858
  dataHubRetentionConfig: DataHubRetentionConfigClass
36839
37859
  telemetryClientId: TelemetryClientIdClass
36840
37860
  dataHubAccessTokenInfo: DataHubAccessTokenInfoClass
@@ -36878,6 +37898,7 @@ class AspectBag(TypedDict, total=False):
36878
37898
  testInfo: TestInfoClass
36879
37899
  batchTestRunEvent: BatchTestRunEventClass
36880
37900
  dataPlatformInstanceProperties: DataPlatformInstancePropertiesClass
37901
+ icebergWarehouseInfo: IcebergWarehouseInfoClass
36881
37902
  editableERModelRelationshipProperties: EditableERModelRelationshipPropertiesClass
36882
37903
  erModelRelationshipProperties: ERModelRelationshipPropertiesClass
36883
37904
  entityTypeInfo: EntityTypeInfoClass
@@ -36887,6 +37908,8 @@ class AspectBag(TypedDict, total=False):
36887
37908
  editableSchemaMetadata: EditableSchemaMetadataClass
36888
37909
  schemaProposals: SchemaProposalsClass
36889
37910
  schemaMetadata: SchemaMetadataClass
37911
+ dataHubMetricCubeEvent: DataHubMetricCubeEventClass
37912
+ dataHubMetricCubeDefinition: DataHubMetricCubeDefinitionClass
36890
37913
  access: AccessClass
36891
37914
  anomaliesSummary: AnomaliesSummaryClass
36892
37915
  proposals: ProposalsClass
@@ -36935,6 +37958,7 @@ class AspectBag(TypedDict, total=False):
36935
37958
  editableDatasetProperties: EditableDatasetPropertiesClass
36936
37959
  datasetProfile: DatasetProfileClass
36937
37960
  datasetDeprecation: DatasetDeprecationClass
37961
+ icebergCatalogInfo: IcebergCatalogInfoClass
36938
37962
  datasetProperties: DatasetPropertiesClass
36939
37963
  partitionsSummary: PartitionsSummaryClass
36940
37964
  datasetUpstreamLineage: DatasetUpstreamLineageClass
@@ -36979,6 +38003,7 @@ class AspectBag(TypedDict, total=False):
36979
38003
  postKey: PostKeyClass
36980
38004
  mlPrimaryKeyKey: MLPrimaryKeyKeyClass
36981
38005
  dataPlatformInstanceKey: DataPlatformInstanceKeyClass
38006
+ dataHubMetricCubeKey: DataHubMetricCubeKeyClass
36982
38007
  queryKey: QueryKeyClass
36983
38008
  datasetKey: DatasetKeyClass
36984
38009
  dataHubExecutionRequestKey: ExecutionRequestKeyClass
@@ -37013,6 +38038,7 @@ class AspectBag(TypedDict, total=False):
37013
38038
  dataHubRetentionKey: DataHubRetentionKeyClass
37014
38039
  ownershipTypeKey: OwnershipTypeKeyClass
37015
38040
  actionRequestKey: ActionRequestKeyClass
38041
+ dataHubRemoteExecutorGlobalConfigKey: RemoteExecutorGlobalConfigKeyClass
37016
38042
  dataFlowKey: DataFlowKeyClass
37017
38043
  genericEntityKey: GenericEntityKeyClass
37018
38044
  dataContractKey: DataContractKeyClass
@@ -37077,6 +38103,7 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
37077
38103
  'post': PostKeyClass,
37078
38104
  'mlPrimaryKey': MLPrimaryKeyKeyClass,
37079
38105
  'dataPlatformInstance': DataPlatformInstanceKeyClass,
38106
+ 'dataHubMetricCube': DataHubMetricCubeKeyClass,
37080
38107
  'query': QueryKeyClass,
37081
38108
  'dataset': DatasetKeyClass,
37082
38109
  'dataHubExecutionRequest': ExecutionRequestKeyClass,
@@ -37111,6 +38138,7 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
37111
38138
  'dataHubRetention': DataHubRetentionKeyClass,
37112
38139
  'ownershipType': OwnershipTypeKeyClass,
37113
38140
  'actionRequest': ActionRequestKeyClass,
38141
+ 'dataHubRemoteExecutorGlobalConfig': RemoteExecutorGlobalConfigKeyClass,
37114
38142
  'dataFlow': DataFlowKeyClass,
37115
38143
  'dataContract': DataContractKeyClass,
37116
38144
  'dataHubConnection': DataHubConnectionKeyClass,