acryl-datahub-cloud 0.3.9.2rc1__py3-none-any.whl → 0.3.10rc1__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.


This version of acryl-datahub-cloud might be problematic.

Files changed (55)
  1. acryl_datahub_cloud/_codegen_config.json +1 -1
  2. acryl_datahub_cloud/action_request/action_request_owner_source.py +7 -1
  3. acryl_datahub_cloud/metadata/_urns/urn_defs.py +80 -56
  4. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/anomaly/__init__.py +2 -12
  5. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/assertion/__init__.py +6 -0
  6. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/common/__init__.py +2 -0
  7. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py +2 -0
  8. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/monitor/__init__.py +16 -0
  9. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/timeseries/__init__.py +8 -0
  10. acryl_datahub_cloud/metadata/schema.avsc +1137 -663
  11. acryl_datahub_cloud/metadata/schema_classes.py +1094 -310
  12. acryl_datahub_cloud/metadata/schemas/AnomalyKey.avsc +1 -7
  13. acryl_datahub_cloud/metadata/schemas/AssertionAnalyticsRunEvent.avsc +445 -213
  14. acryl_datahub_cloud/metadata/schemas/AssertionInferenceDetails.avsc +237 -12
  15. acryl_datahub_cloud/metadata/schemas/AssertionInfo.avsc +277 -212
  16. acryl_datahub_cloud/metadata/schemas/AssertionRunEvent.avsc +445 -213
  17. acryl_datahub_cloud/metadata/schemas/CorpGroupKey.avsc +1 -0
  18. acryl_datahub_cloud/metadata/schemas/CorpGroupSettings.avsc +48 -1
  19. acryl_datahub_cloud/metadata/schemas/CorpUserKey.avsc +3 -2
  20. acryl_datahub_cloud/metadata/schemas/CorpUserSettings.avsc +48 -1
  21. acryl_datahub_cloud/metadata/schemas/DataProcessInstanceInput.avsc +2 -1
  22. acryl_datahub_cloud/metadata/schemas/DataProcessInstanceOutput.avsc +2 -1
  23. acryl_datahub_cloud/metadata/schemas/DataProcessKey.avsc +2 -1
  24. acryl_datahub_cloud/metadata/schemas/DataProductKey.avsc +1 -0
  25. acryl_datahub_cloud/metadata/schemas/Deprecation.avsc +2 -0
  26. acryl_datahub_cloud/metadata/schemas/GlobalSettingsInfo.avsc +1 -1
  27. acryl_datahub_cloud/metadata/schemas/GlossaryNodeKey.avsc +1 -0
  28. acryl_datahub_cloud/metadata/schemas/GlossaryTermKey.avsc +1 -0
  29. acryl_datahub_cloud/metadata/schemas/InstitutionalMemory.avsc +31 -0
  30. acryl_datahub_cloud/metadata/schemas/MLFeatureKey.avsc +1 -0
  31. acryl_datahub_cloud/metadata/schemas/MLFeatureTableKey.avsc +1 -0
  32. acryl_datahub_cloud/metadata/schemas/MLModelDeploymentKey.avsc +2 -1
  33. acryl_datahub_cloud/metadata/schemas/MLModelGroupKey.avsc +3 -0
  34. acryl_datahub_cloud/metadata/schemas/MLModelGroupProperties.avsc +16 -0
  35. acryl_datahub_cloud/metadata/schemas/MLModelKey.avsc +2 -0
  36. acryl_datahub_cloud/metadata/schemas/MLPrimaryKeyKey.avsc +1 -0
  37. acryl_datahub_cloud/metadata/schemas/MetadataChangeEvent.avsc +49 -0
  38. acryl_datahub_cloud/metadata/schemas/MonitorAnomalyEvent.avsc +265 -0
  39. acryl_datahub_cloud/metadata/schemas/MonitorInfo.avsc +549 -212
  40. acryl_datahub_cloud/metadata/schemas/MonitorKey.avsc +2 -1
  41. acryl_datahub_cloud/metadata/schemas/MonitorSuiteInfo.avsc +790 -0
  42. acryl_datahub_cloud/metadata/schemas/MonitorSuiteKey.avsc +21 -0
  43. acryl_datahub_cloud/metadata/schemas/NotificationRequest.avsc +3 -1
  44. acryl_datahub_cloud/metadata/schemas/PostKey.avsc +1 -0
  45. acryl_datahub_cloud/metadata/schemas/QueryProperties.avsc +20 -0
  46. acryl_datahub_cloud/metadata/schemas/RemoteExecutorPoolInfo.avsc +9 -0
  47. acryl_datahub_cloud/metadata/schemas/SchemaFieldKey.avsc +1 -0
  48. acryl_datahub_cloud/metadata/schemas/Siblings.avsc +2 -0
  49. acryl_datahub_cloud/metadata/schemas/SubscriptionInfo.avsc +48 -1
  50. {acryl_datahub_cloud-0.3.9.2rc1.dist-info → acryl_datahub_cloud-0.3.10rc1.dist-info}/METADATA +43 -43
  51. {acryl_datahub_cloud-0.3.9.2rc1.dist-info → acryl_datahub_cloud-0.3.10rc1.dist-info}/RECORD +54 -52
  52. acryl_datahub_cloud/metadata/schemas/AnomalyInfo.avsc +0 -342
  53. {acryl_datahub_cloud-0.3.9.2rc1.dist-info → acryl_datahub_cloud-0.3.10rc1.dist-info}/WHEEL +0 -0
  54. {acryl_datahub_cloud-0.3.9.2rc1.dist-info → acryl_datahub_cloud-0.3.10rc1.dist-info}/entry_points.txt +0 -0
  55. {acryl_datahub_cloud-0.3.9.2rc1.dist-info → acryl_datahub_cloud-0.3.10rc1.dist-info}/top_level.txt +0 -0
@@ -1687,190 +1687,9 @@ class InferenceMetadataClass(DictWrapper):
  self._inner_dict['confidenceLevel'] = value
 
 
- class AnomalyInfoClass(_Aspect):
- """Information about an anomaly raised on an asset."""
-
-
- ASPECT_NAME = 'anomalyInfo'
- ASPECT_INFO = {}
- RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.anomaly.AnomalyInfo")
-
- def __init__(self,
- type: Union[str, "AnomalyTypeClass"],
- entity: str,
- status: "AnomalyStatusClass",
- source: "AnomalySourceClass",
- created: "AuditStampClass",
- description: Union[None, str]=None,
- severity: Optional[Union[int, None]]=None,
- review: Union[None, "AnomalyReviewClass"]=None,
- ):
- super().__init__()
-
- self.type = type
- self.description = description
- self.entity = entity
- if severity is None:
- # default: 0
- self.severity = self.RECORD_SCHEMA.fields_dict["severity"].default
- else:
- self.severity = severity
- self.status = status
- self.review = review
- self.source = source
- self.created = created
-
- def _restore_defaults(self) -> None:
- self.type = AnomalyTypeClass.FRESHNESS
- self.description = self.RECORD_SCHEMA.fields_dict["description"].default
- self.entity = str()
- self.severity = self.RECORD_SCHEMA.fields_dict["severity"].default
- self.status = AnomalyStatusClass._construct_with_defaults()
- self.review = self.RECORD_SCHEMA.fields_dict["review"].default
- self.source = AnomalySourceClass._construct_with_defaults()
- self.created = AuditStampClass._construct_with_defaults()
-
-
- @property
- def type(self) -> Union[str, "AnomalyTypeClass"]:
- """The type of anomaly"""
- return self._inner_dict.get('type') # type: ignore
-
- @type.setter
- def type(self, value: Union[str, "AnomalyTypeClass"]) -> None:
- self._inner_dict['type'] = value
-
-
- @property
- def description(self) -> Union[None, str]:
- """Optional description associated with the anomaly. e.g. an explanation in human-readable form."""
- return self._inner_dict.get('description') # type: ignore
-
- @description.setter
- def description(self, value: Union[None, str]) -> None:
- self._inner_dict['description'] = value
-
-
- @property
- def entity(self) -> str:
- """A reference to the entity associated with the anomaly."""
- return self._inner_dict.get('entity') # type: ignore
-
- @entity.setter
- def entity(self, value: str) -> None:
- self._inner_dict['entity'] = value
-
-
- @property
- def severity(self) -> Union[int, None]:
- """A numeric severity for the anomaly.
- This reflects the significance of the anomaly and is based on
- how far out of the norm the anomaly resides."""
- return self._inner_dict.get('severity') # type: ignore
-
- @severity.setter
- def severity(self, value: Union[int, None]) -> None:
- self._inner_dict['severity'] = value
-
-
- @property
- def status(self) -> "AnomalyStatusClass":
- """The current status of the anomaly, whether it is active or resolved."""
- return self._inner_dict.get('status') # type: ignore
-
- @status.setter
- def status(self, value: "AnomalyStatusClass") -> None:
- self._inner_dict['status'] = value
-
-
- @property
- def review(self) -> Union[None, "AnomalyReviewClass"]:
- """The review of the anomaly, based on human-provided feedback.
- If this is not present, then the Anomaly has not yet been reviewed."""
- return self._inner_dict.get('review') # type: ignore
-
- @review.setter
- def review(self, value: Union[None, "AnomalyReviewClass"]) -> None:
- self._inner_dict['review'] = value
-
-
- @property
- def source(self) -> "AnomalySourceClass":
- """The source of an anomaly, i.e. how it was generated."""
- return self._inner_dict.get('source') # type: ignore
-
- @source.setter
- def source(self, value: "AnomalySourceClass") -> None:
- self._inner_dict['source'] = value
-
-
- @property
- def created(self) -> "AuditStampClass":
- """The time at which the request was initially created"""
- return self._inner_dict.get('created') # type: ignore
-
- @created.setter
- def created(self, value: "AuditStampClass") -> None:
- self._inner_dict['created'] = value
-
-
- class AnomalyReviewClass(DictWrapper):
- """The human-provided review of the anomaly."""
-
- RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.anomaly.AnomalyReview")
- def __init__(self,
- state: Union[str, "AnomalyReviewStateClass"],
- lastUpdated: "AuditStampClass",
- message: Union[None, str]=None,
- ):
- super().__init__()
-
- self.state = state
- self.message = message
- self.lastUpdated = lastUpdated
-
- def _restore_defaults(self) -> None:
- self.state = AnomalyReviewStateClass.PENDING
- self.message = self.RECORD_SCHEMA.fields_dict["message"].default
- self.lastUpdated = AuditStampClass._construct_with_defaults()
-
-
- @property
- def state(self) -> Union[str, "AnomalyReviewStateClass"]:
- """The state of the anomaly"""
- return self._inner_dict.get('state') # type: ignore
-
- @state.setter
- def state(self, value: Union[str, "AnomalyReviewStateClass"]) -> None:
- self._inner_dict['state'] = value
-
-
- @property
- def message(self) -> Union[None, str]:
- """An optional explanation of the review."""
- return self._inner_dict.get('message') # type: ignore
-
- @message.setter
- def message(self, value: Union[None, str]) -> None:
- self._inner_dict['message'] = value
-
-
- @property
- def lastUpdated(self) -> "AuditStampClass":
- """The time at which the anomaly review state last changed"""
- return self._inner_dict.get('lastUpdated') # type: ignore
-
- @lastUpdated.setter
- def lastUpdated(self, value: "AuditStampClass") -> None:
- self._inner_dict['lastUpdated'] = value
-
-
  class AnomalyReviewStateClass(object):
  # No docs available.
 
- PENDING = "PENDING"
- """The anomaly is pending human review."""
-
  CONFIRMED = "CONFIRMED"
  """The anomaly has been confirmed by a human reviewer. This means the anomaly was validated."""
 
@@ -1938,13 +1757,16 @@ class AnomalySourcePropertiesClass(DictWrapper):
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.anomaly.AnomalySourceProperties")
  def __init__(self,
  assertionRunEventTime: Union[None, int]=None,
+ metricCubeTimestamp: Union[None, int]=None,
  ):
  super().__init__()
 
  self.assertionRunEventTime = assertionRunEventTime
+ self.metricCubeTimestamp = metricCubeTimestamp
 
  def _restore_defaults(self) -> None:
  self.assertionRunEventTime = self.RECORD_SCHEMA.fields_dict["assertionRunEventTime"].default
+ self.metricCubeTimestamp = self.RECORD_SCHEMA.fields_dict["metricCubeTimestamp"].default
 
 
  @property
@@ -1957,114 +1779,148 @@ class AnomalySourcePropertiesClass(DictWrapper):
  self._inner_dict['assertionRunEventTime'] = value
 
 
- class AnomalySourceTypeClass(object):
- # No docs available.
-
- INFERRED_ASSERTION_FAILURE = "INFERRED_ASSERTION_FAILURE"
- """An assertion has failed, triggering the anomaly."""
+ @property
+ def metricCubeTimestamp(self) -> Union[None, int]:
+ """The timestamp associated with the metric cube value that triggered the anomaly."""
+ return self._inner_dict.get('metricCubeTimestamp') # type: ignore
 
+ @metricCubeTimestamp.setter
+ def metricCubeTimestamp(self, value: Union[None, int]) -> None:
+ self._inner_dict['metricCubeTimestamp'] = value
 
 
- class AnomalyStateClass(object):
+ class AnomalySourceTypeClass(object):
  # No docs available.
 
- ACTIVE = "ACTIVE"
- """The anomaly is ongoing, or active."""
-
- RESOLVED = "RESOLVED"
- """The anomaly is resolved."""
-
+ INFERRED_ASSERTION_FAILURE = "INFERRED_ASSERTION_FAILURE"
+ """An assertion has failed, triggering the anomaly."""
 
 
- class AnomalyStatusClass(DictWrapper):
- """The status of an anomaly raised on an asset"""
 
- RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.anomaly.AnomalyStatus")
+ class MonitorAnomalyEventClass(_Aspect):
+ """Information about an anomaly generated by a monitor"""
+
+
+ ASPECT_NAME = 'monitorAnomalyEvent'
+ ASPECT_TYPE = 'timeseries'
+ ASPECT_INFO = {}
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.anomaly.MonitorAnomalyEvent")
+
  def __init__(self,
- state: Union[str, "AnomalyStateClass"],
- lastUpdated: "AuditStampClass",
- properties: Union[None, "AnomalyStatusPropertiesClass"]=None,
+ timestampMillis: int,
+ state: Union[str, "AnomalyReviewStateClass"],
+ source: "AnomalySourceClass",
+ created: "TimeStampClass",
+ lastUpdated: "TimeStampClass",
+ eventGranularity: Union[None, "TimeWindowSizeClass"]=None,
+ partitionSpec: Optional[Union["PartitionSpecClass", None]]=None,
+ messageId: Union[None, str]=None,
  ):
  super().__init__()
 
+ self.timestampMillis = timestampMillis
+ self.eventGranularity = eventGranularity
+ if partitionSpec is None:
+ # default: {'partition': 'FULL_TABLE_SNAPSHOT', 'type': 'FULL_TABLE', 'timePartition': None}
+ self.partitionSpec = _json_converter.from_json_object(self.RECORD_SCHEMA.fields_dict["partitionSpec"].default, writers_schema=self.RECORD_SCHEMA.fields_dict["partitionSpec"].type)
+ else:
+ self.partitionSpec = partitionSpec
+ self.messageId = messageId
  self.state = state
- self.properties = properties
+ self.source = source
+ self.created = created
  self.lastUpdated = lastUpdated
 
  def _restore_defaults(self) -> None:
- self.state = AnomalyStateClass.ACTIVE
- self.properties = self.RECORD_SCHEMA.fields_dict["properties"].default
- self.lastUpdated = AuditStampClass._construct_with_defaults()
+ self.timestampMillis = int()
+ self.eventGranularity = self.RECORD_SCHEMA.fields_dict["eventGranularity"].default
+ self.partitionSpec = _json_converter.from_json_object(self.RECORD_SCHEMA.fields_dict["partitionSpec"].default, writers_schema=self.RECORD_SCHEMA.fields_dict["partitionSpec"].type)
+ self.messageId = self.RECORD_SCHEMA.fields_dict["messageId"].default
+ self.state = AnomalyReviewStateClass.CONFIRMED
+ self.source = AnomalySourceClass._construct_with_defaults()
+ self.created = TimeStampClass._construct_with_defaults()
+ self.lastUpdated = TimeStampClass._construct_with_defaults()
 
 
  @property
- def state(self) -> Union[str, "AnomalyStateClass"]:
- """The state of the anomaly"""
- return self._inner_dict.get('state') # type: ignore
+ def timestampMillis(self) -> int:
+ """The event timestamp field as epoch at UTC in milli seconds."""
+ return self._inner_dict.get('timestampMillis') # type: ignore
 
- @state.setter
- def state(self, value: Union[str, "AnomalyStateClass"]) -> None:
- self._inner_dict['state'] = value
+ @timestampMillis.setter
+ def timestampMillis(self, value: int) -> None:
+ self._inner_dict['timestampMillis'] = value
 
 
  @property
- def properties(self) -> Union[None, "AnomalyStatusPropertiesClass"]:
- """Additional properties about the status, for example which evaluation
- of an assertion generated an Anomaly."""
- return self._inner_dict.get('properties') # type: ignore
+ def eventGranularity(self) -> Union[None, "TimeWindowSizeClass"]:
+ """Granularity of the event if applicable"""
+ return self._inner_dict.get('eventGranularity') # type: ignore
 
- @properties.setter
- def properties(self, value: Union[None, "AnomalyStatusPropertiesClass"]) -> None:
- self._inner_dict['properties'] = value
+ @eventGranularity.setter
+ def eventGranularity(self, value: Union[None, "TimeWindowSizeClass"]) -> None:
+ self._inner_dict['eventGranularity'] = value
 
 
  @property
- def lastUpdated(self) -> "AuditStampClass":
- """The time at which the anomaly state last changed"""
- return self._inner_dict.get('lastUpdated') # type: ignore
+ def partitionSpec(self) -> Union["PartitionSpecClass", None]:
+ """The optional partition specification."""
+ return self._inner_dict.get('partitionSpec') # type: ignore
 
- @lastUpdated.setter
- def lastUpdated(self, value: "AuditStampClass") -> None:
- self._inner_dict['lastUpdated'] = value
+ @partitionSpec.setter
+ def partitionSpec(self, value: Union["PartitionSpecClass", None]) -> None:
+ self._inner_dict['partitionSpec'] = value
 
 
- class AnomalyStatusPropertiesClass(DictWrapper):
- """Ad-hoc properties about an anomaly status."""
+ @property
+ def messageId(self) -> Union[None, str]:
+ """The optional messageId, if provided serves as a custom user-defined unique identifier for an aspect value."""
+ return self._inner_dict.get('messageId') # type: ignore
 
- RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.anomaly.AnomalyStatusProperties")
- def __init__(self,
- assertionRunEventTime: Union[None, int]=None,
- ):
- super().__init__()
-
- self.assertionRunEventTime = assertionRunEventTime
+ @messageId.setter
+ def messageId(self, value: Union[None, str]) -> None:
+ self._inner_dict['messageId'] = value
 
- def _restore_defaults(self) -> None:
- self.assertionRunEventTime = self.RECORD_SCHEMA.fields_dict["assertionRunEventTime"].default
+
+ @property
+ def state(self) -> Union[str, "AnomalyReviewStateClass"]:
+ """The review of the anomaly, based on human-provided feedback.
+ If this is not present, then the Anomaly has not yet been reviewed."""
+ return self._inner_dict.get('state') # type: ignore
+
+ @state.setter
+ def state(self, value: Union[str, "AnomalyReviewStateClass"]) -> None:
+ self._inner_dict['state'] = value
 
 
  @property
- def assertionRunEventTime(self) -> Union[None, int]:
- """The timestampMillis field of the AssertionRunEvent which altered the anomaly status the anomaly (if applicable)."""
- return self._inner_dict.get('assertionRunEventTime') # type: ignore
+ def source(self) -> "AnomalySourceClass":
+ """The source of an anomaly, i.e. how it was generated."""
+ return self._inner_dict.get('source') # type: ignore
 
- @assertionRunEventTime.setter
- def assertionRunEventTime(self, value: Union[None, int]) -> None:
- self._inner_dict['assertionRunEventTime'] = value
+ @source.setter
+ def source(self, value: "AnomalySourceClass") -> None:
+ self._inner_dict['source'] = value
 
 
- class AnomalyTypeClass(object):
- """A type of an anomaly"""
+ @property
+ def created(self) -> "TimeStampClass":
+ """The time at which the request was initially created"""
+ return self._inner_dict.get('created') # type: ignore
 
- FRESHNESS = "FRESHNESS"
- """An inferred Freshness Assertion has failed, triggering the anomaly."""
+ @created.setter
+ def created(self, value: "TimeStampClass") -> None:
+ self._inner_dict['created'] = value
 
- DATASET_COLUMN = "DATASET_COLUMN"
- """An inferred assertion on a particular column(s) of a Dataset has triggered the anomaly."""
 
- DATASET_ROWS = "DATASET_ROWS"
- """An inferred assertion on the row count of a Dataset has triggered the anomaly."""
+ @property
+ def lastUpdated(self) -> "TimeStampClass":
+ """The time at which the request was initially created"""
+ return self._inner_dict.get('lastUpdated') # type: ignore
 
+ @lastUpdated.setter
+ def lastUpdated(self, value: "TimeStampClass") -> None:
+ self._inner_dict['lastUpdated'] = value
 
 
  class AdjustmentAlgorithmClass(object):
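The hunks above retire AnomalyInfoClass, AnomalyStateClass, AnomalyStatusClass, AnomalyStatusPropertiesClass and AnomalyTypeClass in favour of a single timeseries aspect, MonitorAnomalyEventClass. A rough, non-authoritative sketch of constructing the new class follows; the AnomalySourceClass constructor is not shown in this diff, so the type-only call below is an assumption.

```python
import time

from acryl_datahub_cloud.metadata.schema_classes import (
    AnomalyReviewStateClass,
    AnomalySourceClass,
    AnomalySourceTypeClass,
    MonitorAnomalyEventClass,
    TimeStampClass,
)

now_ms = int(time.time() * 1000)  # epoch millis, matching timestampMillis semantics

event = MonitorAnomalyEventClass(
    timestampMillis=now_ms,
    state=AnomalyReviewStateClass.CONFIRMED,
    # Assumption: AnomalySourceClass accepts a `type` field; its constructor is not part of this diff.
    source=AnomalySourceClass(type=AnomalySourceTypeClass.INFERRED_ASSERTION_FAILURE),
    created=TimeStampClass(time=now_ms),
    lastUpdated=TimeStampClass(time=now_ms),
)
```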
@@ -2163,28 +2019,40 @@ class AssertionActionsClass(_Aspect):
 
  class AssertionAdjustmentSettingsClass(DictWrapper):
  """A set of settings that can be used to adjust assertion values
- This is mainly applied against inferred assertions"""
+ NOTE: This is only applied against inferred assertions"""
 
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.assertion.AssertionAdjustmentSettings")
  def __init__(self,
- algorithm: Union[str, "AdjustmentAlgorithmClass"],
- algorithmName: str,
+ algorithm: Union[None, Union[str, "AdjustmentAlgorithmClass"]]=None,
+ algorithmName: Union[None, str]=None,
  context: Union[None, Dict[str, str]]=None,
+ exclusionWindows: Union[None, List["AssertionExclusionWindowClass"]]=None,
+ anomalyExclusionWindows: Union[None, List["AssertionExclusionWindowClass"]]=None,
+ trainingDataLookbackWindowDays: Union[None, int]=None,
+ sensitivity: Union[None, "AssertionMonitorSensitivityClass"]=None,
  ):
  super().__init__()
 
  self.algorithm = algorithm
  self.algorithmName = algorithmName
  self.context = context
+ self.exclusionWindows = exclusionWindows
+ self.anomalyExclusionWindows = anomalyExclusionWindows
+ self.trainingDataLookbackWindowDays = trainingDataLookbackWindowDays
+ self.sensitivity = sensitivity
 
  def _restore_defaults(self) -> None:
- self.algorithm = AdjustmentAlgorithmClass.CUSTOM
- self.algorithmName = str()
+ self.algorithm = self.RECORD_SCHEMA.fields_dict["algorithm"].default
+ self.algorithmName = self.RECORD_SCHEMA.fields_dict["algorithmName"].default
  self.context = self.RECORD_SCHEMA.fields_dict["context"].default
+ self.exclusionWindows = self.RECORD_SCHEMA.fields_dict["exclusionWindows"].default
+ self.anomalyExclusionWindows = self.RECORD_SCHEMA.fields_dict["anomalyExclusionWindows"].default
+ self.trainingDataLookbackWindowDays = self.RECORD_SCHEMA.fields_dict["trainingDataLookbackWindowDays"].default
+ self.sensitivity = self.RECORD_SCHEMA.fields_dict["sensitivity"].default
 
 
  @property
- def algorithm(self) -> Union[str, "AdjustmentAlgorithmClass"]:
+ def algorithm(self) -> Union[None, Union[str, "AdjustmentAlgorithmClass"]]:
  """The algorithm to use to adjust assertion values to power the sensitivity control feature
 
  Note that for algorithm "STDDEV" which leverages the standard deviation of trailing values,
@@ -2192,17 +2060,17 @@ class AssertionAdjustmentSettingsClass(DictWrapper):
  return self._inner_dict.get('algorithm') # type: ignore
 
  @algorithm.setter
- def algorithm(self, value: Union[str, "AdjustmentAlgorithmClass"]) -> None:
+ def algorithm(self, value: Union[None, Union[str, "AdjustmentAlgorithmClass"]]) -> None:
  self._inner_dict['algorithm'] = value
 
 
  @property
- def algorithmName(self) -> str:
+ def algorithmName(self) -> Union[None, str]:
  """The name of the algorithm to use to adjust assertion values to power the sensitivity control feature"""
  return self._inner_dict.get('algorithmName') # type: ignore
 
  @algorithmName.setter
- def algorithmName(self, value: str) -> None:
+ def algorithmName(self, value: Union[None, str]) -> None:
  self._inner_dict['algorithmName'] = value
 
 
@@ -2216,6 +2084,48 @@ class AssertionAdjustmentSettingsClass(DictWrapper):
  self._inner_dict['context'] = value
 
 
+ @property
+ def exclusionWindows(self) -> Union[None, List["AssertionExclusionWindowClass"]]:
+ """The user-defined exclusion windows for the assertion."""
+ return self._inner_dict.get('exclusionWindows') # type: ignore
+
+ @exclusionWindows.setter
+ def exclusionWindows(self, value: Union[None, List["AssertionExclusionWindowClass"]]) -> None:
+ self._inner_dict['exclusionWindows'] = value
+
+
+ @property
+ def anomalyExclusionWindows(self) -> Union[None, List["AssertionExclusionWindowClass"]]:
+ """Exclude the anomaly data points for the assertion. These are detected automatically by the system."""
+ return self._inner_dict.get('anomalyExclusionWindows') # type: ignore
+
+ @anomalyExclusionWindows.setter
+ def anomalyExclusionWindows(self, value: Union[None, List["AssertionExclusionWindowClass"]]) -> None:
+ self._inner_dict['anomalyExclusionWindows'] = value
+
+
+ @property
+ def trainingDataLookbackWindowDays(self) -> Union[None, int]:
+ """The lookback window configuration for retrieving training data points.
+ Defines how far back in time to look when gathering data for training.
+ Defaults to 60 days if not specified."""
+ return self._inner_dict.get('trainingDataLookbackWindowDays') # type: ignore
+
+ @trainingDataLookbackWindowDays.setter
+ def trainingDataLookbackWindowDays(self, value: Union[None, int]) -> None:
+ self._inner_dict['trainingDataLookbackWindowDays'] = value
+
+
+ @property
+ def sensitivity(self) -> Union[None, "AssertionMonitorSensitivityClass"]:
+ """The sensitivity level for an assertion"""
+ return self._inner_dict.get('sensitivity') # type: ignore
+
+ @sensitivity.setter
+ def sensitivity(self, value: Union[None, "AssertionMonitorSensitivityClass"]) -> None:
+ self._inner_dict['sensitivity'] = value
+
+
  class AssertionAnalyticsRunEventClass(_Aspect):
  """An event representing the current status of evaluating an assertion on a batch.
  This should be derrived from {@link AssertionRunEvent}.
@@ -2697,8 +2607,107 @@ class AssertionDryRunResultClass(DictWrapper):
  self._inner_dict['error'] = value
 
 
+ class AssertionExclusionWindowClass(DictWrapper):
+ """Information about an assertion exclusion window.
+ This is used to exclude specific time periods from assertion evaluation or training.
+ For example, excluding holidays, weekends, or known anomalous periods.
+ Supports various recurrence patterns: one-off exclusions, or recurring exclusions
+ (yearly, monthly, weekly, daily, or specific holidays)."""
+
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.assertion.AssertionExclusionWindow")
+ def __init__(self,
+ type: Union[str, "AssertionExclusionWindowTypeClass"],
+ displayName: Union[None, str]=None,
+ fixedRange: Union[None, "AbsoluteTimeWindowClass"]=None,
+ weekly: Union[None, "WeeklyWindowClass"]=None,
+ holiday: Union[None, "HolidayWindowClass"]=None,
+ ):
+ super().__init__()
+
+ self.type = type
+ self.displayName = displayName
+ self.fixedRange = fixedRange
+ self.weekly = weekly
+ self.holiday = holiday
+
+ def _restore_defaults(self) -> None:
+ self.type = AssertionExclusionWindowTypeClass.FIXED_RANGE
+ self.displayName = self.RECORD_SCHEMA.fields_dict["displayName"].default
+ self.fixedRange = self.RECORD_SCHEMA.fields_dict["fixedRange"].default
+ self.weekly = self.RECORD_SCHEMA.fields_dict["weekly"].default
+ self.holiday = self.RECORD_SCHEMA.fields_dict["holiday"].default
+
+
+ @property
+ def type(self) -> Union[str, "AssertionExclusionWindowTypeClass"]:
+ """The type of exclusion window."""
+ return self._inner_dict.get('type') # type: ignore
+
+ @type.setter
+ def type(self, value: Union[str, "AssertionExclusionWindowTypeClass"]) -> None:
+ self._inner_dict['type'] = value
+
+
+ @property
+ def displayName(self) -> Union[None, str]:
+ """Display name for this exclusion window"""
+ return self._inner_dict.get('displayName') # type: ignore
+
+ @displayName.setter
+ def displayName(self, value: Union[None, str]) -> None:
+ self._inner_dict['displayName'] = value
+
+
+ @property
+ def fixedRange(self) -> Union[None, "AbsoluteTimeWindowClass"]:
+ """Date window for one-off exclusion range
+ Only used when type is FIXED_RANGE"""
+ return self._inner_dict.get('fixedRange') # type: ignore
+
+ @fixedRange.setter
+ def fixedRange(self, value: Union[None, "AbsoluteTimeWindowClass"]) -> None:
+ self._inner_dict['fixedRange'] = value
+
+
+ @property
+ def weekly(self) -> Union[None, "WeeklyWindowClass"]:
+ """Window of time each week to exclude."""
+ return self._inner_dict.get('weekly') # type: ignore
+
+ @weekly.setter
+ def weekly(self, value: Union[None, "WeeklyWindowClass"]) -> None:
+ self._inner_dict['weekly'] = value
+
+
+ @property
+ def holiday(self) -> Union[None, "HolidayWindowClass"]:
+ """The holiday to exclude
+ Only used when type is HOLIDAY"""
+ return self._inner_dict.get('holiday') # type: ignore
+
+ @holiday.setter
+ def holiday(self, value: Union[None, "HolidayWindowClass"]) -> None:
+ self._inner_dict['holiday'] = value
+
+
+ class AssertionExclusionWindowTypeClass(object):
+ # No docs available.
+
+ FIXED_RANGE = "FIXED_RANGE"
+ """One-off time range to exclude"""
+
+ WEEKLY = "WEEKLY"
+ """Recurring time range to exclude each week"""
+
+ HOLIDAY = "HOLIDAY"
+ """Specific holiday to exclude"""
+
+
+
  class AssertionInferenceDetailsClass(_Aspect):
- """The details of an assertion that was inferred."""
+ """The details of an assertion that was inferred.
+
+ Deprecated as an aspect for assertions!"""
 
 
  ASPECT_NAME = 'assertionInferenceDetails'
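The new AssertionExclusionWindowClass above, together with the widened AssertionAdjustmentSettingsClass from the earlier hunks, can be combined roughly as below. This is a minimal sketch using only constructor signatures visible in this diff; the AbsoluteTimeWindowClass payload for fixedRange is not shown here and is therefore omitted, and AssertionMonitorSensitivityClass appears in the next hunk.

```python
from acryl_datahub_cloud.metadata.schema_classes import (
    AssertionAdjustmentSettingsClass,
    AssertionExclusionWindowClass,
    AssertionExclusionWindowTypeClass,
    AssertionMonitorSensitivityClass,
)

# A one-off exclusion window. A real window would normally carry its date range via
# fixedRange (AbsoluteTimeWindowClass), whose constructor is not part of this diff.
year_end_freeze = AssertionExclusionWindowClass(
    type=AssertionExclusionWindowTypeClass.FIXED_RANGE,
    displayName="Year-end change freeze",
)

# Every field of AssertionAdjustmentSettingsClass is now optional (see the hunk above).
adjustment = AssertionAdjustmentSettingsClass(
    exclusionWindows=[year_end_freeze],
    trainingDataLookbackWindowDays=60,  # documented as the default when unset
    sensitivity=AssertionMonitorSensitivityClass(level=5),  # level is documented as 0-10
)
```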
@@ -2986,6 +2995,31 @@ class AssertionInfoClass(_Aspect):
  self._inner_dict['description'] = value
 
 
+ class AssertionMonitorSensitivityClass(DictWrapper):
+ """Assertion monitor sensitivity is a measure of how sensitive the assertion monitor is to the assertion condition."""
+
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.assertion.AssertionMonitorSensitivity")
+ def __init__(self,
+ level: int,
+ ):
+ super().__init__()
+
+ self.level = level
+
+ def _restore_defaults(self) -> None:
+ self.level = int()
+
+
+ @property
+ def level(self) -> int:
+ """The assertion monitor level from 0-10"""
+ return self._inner_dict.get('level') # type: ignore
+
+ @level.setter
+ def level(self, value: int) -> None:
+ self._inner_dict['level'] = value
+
+
  class AssertionResultClass(DictWrapper):
  """The result of running an assertion"""
 
@@ -3429,18 +3463,15 @@ class AssertionSourceClass(DictWrapper):
  def __init__(self,
  type: Union[str, "AssertionSourceTypeClass"],
  created: Union[None, "AuditStampClass"]=None,
- assertionInferenceDetails: Union[None, "AssertionInferenceDetailsClass"]=None,
  ):
  super().__init__()
 
  self.type = type
  self.created = created
- self.assertionInferenceDetails = assertionInferenceDetails
 
  def _restore_defaults(self) -> None:
  self.type = AssertionSourceTypeClass.NATIVE
  self.created = self.RECORD_SCHEMA.fields_dict["created"].default
- self.assertionInferenceDetails = self.RECORD_SCHEMA.fields_dict["assertionInferenceDetails"].default
 
 
  @property
@@ -3464,17 +3495,6 @@ class AssertionSourceClass(DictWrapper):
  self._inner_dict['created'] = value
 
 
- @property
- def assertionInferenceDetails(self) -> Union[None, "AssertionInferenceDetailsClass"]:
- """The details of an assertion that was inferred.
- This field is only present if the sourceType is INFERRED."""
- return self._inner_dict.get('assertionInferenceDetails') # type: ignore
-
- @assertionInferenceDetails.setter
- def assertionInferenceDetails(self, value: Union[None, "AssertionInferenceDetailsClass"]) -> None:
- self._inner_dict['assertionInferenceDetails'] = value
-
-
  class AssertionSourceTypeClass(object):
  # No docs available.
 
@@ -4579,7 +4599,7 @@ class FreshnessAssertionInfoClass(DictWrapper):
  def __init__(self,
  type: Union[str, "FreshnessAssertionTypeClass"],
  entity: str,
- schedule: "FreshnessAssertionScheduleClass",
+ schedule: Union[None, "FreshnessAssertionScheduleClass"]=None,
  filter: Union[None, "DatasetFilterClass"]=None,
  ):
  super().__init__()
@@ -4592,7 +4612,7 @@ class FreshnessAssertionInfoClass(DictWrapper):
  def _restore_defaults(self) -> None:
  self.type = FreshnessAssertionTypeClass.DATASET_CHANGE
  self.entity = str()
- self.schedule = FreshnessAssertionScheduleClass._construct_with_defaults()
+ self.schedule = self.RECORD_SCHEMA.fields_dict["schedule"].default
  self.filter = self.RECORD_SCHEMA.fields_dict["filter"].default
 
 
@@ -4617,12 +4637,12 @@ class FreshnessAssertionInfoClass(DictWrapper):
 
 
  @property
- def schedule(self) -> "FreshnessAssertionScheduleClass":
+ def schedule(self) -> Union[None, "FreshnessAssertionScheduleClass"]:
  """Produce FAILURE Assertion Result if the asset is not updated on the cadence and within the time range described by the schedule."""
  return self._inner_dict.get('schedule') # type: ignore
 
  @schedule.setter
- def schedule(self, value: "FreshnessAssertionScheduleClass") -> None:
+ def schedule(self, value: Union[None, "FreshnessAssertionScheduleClass"]) -> None:
  self._inner_dict['schedule'] = value
 
 
@@ -4645,17 +4665,20 @@ class FreshnessAssertionScheduleClass(DictWrapper):
  type: Union[str, "FreshnessAssertionScheduleTypeClass"],
  cron: Union[None, "FreshnessCronScheduleClass"]=None,
  fixedInterval: Union[None, "FixedIntervalScheduleClass"]=None,
+ exclusions: Union[None, List["AssertionExclusionWindowClass"]]=None,
  ):
  super().__init__()
 
  self.type = type
  self.cron = cron
  self.fixedInterval = fixedInterval
+ self.exclusions = exclusions
 
  def _restore_defaults(self) -> None:
  self.type = FreshnessAssertionScheduleTypeClass.CRON
  self.cron = self.RECORD_SCHEMA.fields_dict["cron"].default
  self.fixedInterval = self.RECORD_SCHEMA.fields_dict["fixedInterval"].default
+ self.exclusions = self.RECORD_SCHEMA.fields_dict["exclusions"].default
 
 
  @property
@@ -4691,6 +4714,16 @@ class FreshnessAssertionScheduleClass(DictWrapper):
  self._inner_dict['fixedInterval'] = value
 
 
+ @property
+ def exclusions(self) -> Union[None, List["AssertionExclusionWindowClass"]]:
+ """Blackout windows. Periods of time to exclude from the schedule."""
+ return self._inner_dict.get('exclusions') # type: ignore
+
+ @exclusions.setter
+ def exclusions(self, value: Union[None, List["AssertionExclusionWindowClass"]]) -> None:
+ self._inner_dict['exclusions'] = value
+
+
  class FreshnessAssertionScheduleTypeClass(object):
  # No docs available.
 
@@ -8403,17 +8436,23 @@ class InstitutionalMemoryMetadataClass(DictWrapper):
  url: str,
  description: str,
  createStamp: "AuditStampClass",
+ updateStamp: Union[None, "AuditStampClass"]=None,
+ settings: Union[None, "InstitutionalMemoryMetadataSettingsClass"]=None,
  ):
  super().__init__()
 
  self.url = url
  self.description = description
  self.createStamp = createStamp
+ self.updateStamp = updateStamp
+ self.settings = settings
 
  def _restore_defaults(self) -> None:
  self.url = str()
  self.description = str()
  self.createStamp = AuditStampClass._construct_with_defaults()
+ self.updateStamp = self.RECORD_SCHEMA.fields_dict["updateStamp"].default
+ self.settings = self.RECORD_SCHEMA.fields_dict["settings"].default
 
 
  @property
@@ -8446,6 +8485,55 @@ class InstitutionalMemoryMetadataClass(DictWrapper):
  self._inner_dict['createStamp'] = value
 
 
+ @property
+ def updateStamp(self) -> Union[None, "AuditStampClass"]:
+ """Audit stamp associated with updation of this record"""
+ return self._inner_dict.get('updateStamp') # type: ignore
+
+ @updateStamp.setter
+ def updateStamp(self, value: Union[None, "AuditStampClass"]) -> None:
+ self._inner_dict['updateStamp'] = value
+
+
+ @property
+ def settings(self) -> Union[None, "InstitutionalMemoryMetadataSettingsClass"]:
+ """Settings for this record"""
+ return self._inner_dict.get('settings') # type: ignore
+
+ @settings.setter
+ def settings(self, value: Union[None, "InstitutionalMemoryMetadataSettingsClass"]) -> None:
+ self._inner_dict['settings'] = value
+
+
+ class InstitutionalMemoryMetadataSettingsClass(DictWrapper):
+ """Settings related to a record of InstitutionalMemoryMetadata"""
+
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.common.InstitutionalMemoryMetadataSettings")
+ def __init__(self,
+ showInAssetPreview: Optional[bool]=None,
+ ):
+ super().__init__()
+
+ if showInAssetPreview is None:
+ # default: False
+ self.showInAssetPreview = self.RECORD_SCHEMA.fields_dict["showInAssetPreview"].default
+ else:
+ self.showInAssetPreview = showInAssetPreview
+
+ def _restore_defaults(self) -> None:
+ self.showInAssetPreview = self.RECORD_SCHEMA.fields_dict["showInAssetPreview"].default
+
+
+ @property
+ def showInAssetPreview(self) -> bool:
+ """Show record in asset preview like on entity header and search previews"""
+ return self._inner_dict.get('showInAssetPreview') # type: ignore
+
+ @showInAssetPreview.setter
+ def showInAssetPreview(self, value: bool) -> None:
+ self._inner_dict['showInAssetPreview'] = value
+
+
  class MLFeatureDataTypeClass(object):
  """MLFeature Data Type"""
 
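The InstitutionalMemoryMetadataClass changes above add an optional updateStamp and per-link settings. A minimal construction sketch; the AuditStampClass time/actor signature is assumed from the rest of the generated module, and the URL is hypothetical.

```python
import time

from acryl_datahub_cloud.metadata.schema_classes import (
    AuditStampClass,
    InstitutionalMemoryMetadataClass,
    InstitutionalMemoryMetadataSettingsClass,
)

now_ms = int(time.time() * 1000)
stamp = AuditStampClass(time=now_ms, actor="urn:li:corpuser:datahub")

# A documentation link that should also surface on the entity header and search previews.
link = InstitutionalMemoryMetadataClass(
    url="https://wiki.example.com/runbook",  # hypothetical URL
    description="Operational runbook",
    createStamp=stamp,
    updateStamp=stamp,
    settings=InstitutionalMemoryMetadataSettingsClass(showInAssetPreview=True),
)
```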
@@ -14095,7 +14183,7 @@ class DataProductKeyClass(_Aspect):
14095
14183
 
14096
14184
 
14097
14185
  ASPECT_NAME = 'dataProductKey'
14098
- ASPECT_INFO = {'keyForEntity': 'dataProduct', 'entityCategory': 'core', 'entityAspects': ['ownership', 'glossaryTerms', 'globalTags', 'domains', 'dataProductProperties', 'institutionalMemory', 'status', 'structuredProperties', 'forms', 'testResults', 'share', 'origin']}
14186
+ ASPECT_INFO = {'keyForEntity': 'dataProduct', 'entityCategory': 'core', 'entityAspects': ['ownership', 'glossaryTerms', 'globalTags', 'domains', 'dataProductProperties', 'institutionalMemory', 'status', 'structuredProperties', 'forms', 'testResults', 'subTypes', 'share', 'origin']}
14099
14187
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.dataproduct.DataProductKey")
14100
14188
 
14101
14189
  def __init__(self,
@@ -16951,6 +17039,10 @@ class NotificationRecipientOriginTypeClass(object):
16951
17039
  GLOBAL_NOTIFICATION = "GLOBAL_NOTIFICATION"
16952
17040
  """The notification originated from a global change."""
16953
17041
 
17042
+ ACTOR_NOTIFICATION = "ACTOR_NOTIFICATION"
17043
+ """The notification originated from an actor-oriented default notification.
17044
+ For example, proposal task or ownership assignment notifications."""
17045
+
16954
17046
 
16955
17047
 
16956
17048
  class NotificationRecipientTypeClass(object):
@@ -17115,22 +17207,25 @@ class NotificationSettingsClass(DictWrapper):
17115
17207
  sinkTypes: List[Union[str, "NotificationSinkTypeClass"]],
17116
17208
  slackSettings: Union[None, "SlackNotificationSettingsClass"]=None,
17117
17209
  emailSettings: Union[None, "EmailNotificationSettingsClass"]=None,
17210
+ settings: Union[None, Dict[str, "NotificationSettingClass"]]=None,
17118
17211
  ):
17119
17212
  super().__init__()
17120
17213
 
17121
17214
  self.sinkTypes = sinkTypes
17122
17215
  self.slackSettings = slackSettings
17123
17216
  self.emailSettings = emailSettings
17217
+ self.settings = settings
17124
17218
 
17125
17219
  def _restore_defaults(self) -> None:
17126
17220
  self.sinkTypes = list()
17127
17221
  self.slackSettings = self.RECORD_SCHEMA.fields_dict["slackSettings"].default
17128
17222
  self.emailSettings = self.RECORD_SCHEMA.fields_dict["emailSettings"].default
17223
+ self.settings = self.RECORD_SCHEMA.fields_dict["settings"].default
17129
17224
 
17130
17225
 
17131
17226
  @property
17132
17227
  def sinkTypes(self) -> List[Union[str, "NotificationSinkTypeClass"]]:
17133
- """Sink types that notifications are sent to."""
17228
+ """Sink types that notifications are sent to. This is a top-level on/off switch."""
17134
17229
  return self._inner_dict.get('sinkTypes') # type: ignore
17135
17230
 
17136
17231
  @sinkTypes.setter
@@ -17158,6 +17253,19 @@ class NotificationSettingsClass(DictWrapper):
17158
17253
  self._inner_dict['emailSettings'] = value
17159
17254
 
17160
17255
 
17256
+ @property
17257
+ def settings(self) -> Union[None, Dict[str, "NotificationSettingClass"]]:
17258
+ """User or Group Notification Scenario Settings.
17259
+
17260
+ A map of notification scenario type to the settings associated with it.
17261
+ For a list of all scenario types to notify on, check out NotificationScenarioType enum."""
17262
+ return self._inner_dict.get('settings') # type: ignore
17263
+
17264
+ @settings.setter
17265
+ def settings(self, value: Union[None, Dict[str, "NotificationSettingClass"]]) -> None:
17266
+ self._inner_dict['settings'] = value
17267
+
17268
+
17161
17269
  class SlackNotificationSettingsClass(DictWrapper):
17162
17270
  """Slack Notification settings for an actor."""
17163
17271
 
@@ -17877,6 +17985,7 @@ class RemoteExecutorPoolInfoClass(_Aspect):
17877
17985
  creator: Union[None, str]=None,
17878
17986
  description: Union[None, str]=None,
17879
17987
  queueUrl: Union[None, str]=None,
17988
+ queueRegion: Union[None, str]=None,
17880
17989
  isEmbedded: Union[None, bool]=None,
17881
17990
  state: Union[None, "RemoteExecutorPoolStateClass"]=None,
17882
17991
  ):
@@ -17886,6 +17995,7 @@ class RemoteExecutorPoolInfoClass(_Aspect):
17886
17995
  self.creator = creator
17887
17996
  self.description = description
17888
17997
  self.queueUrl = queueUrl
17998
+ self.queueRegion = queueRegion
17889
17999
  self.isEmbedded = isEmbedded
17890
18000
  self.state = state
17891
18001
 
@@ -17894,6 +18004,7 @@ class RemoteExecutorPoolInfoClass(_Aspect):
17894
18004
  self.creator = self.RECORD_SCHEMA.fields_dict["creator"].default
17895
18005
  self.description = self.RECORD_SCHEMA.fields_dict["description"].default
17896
18006
  self.queueUrl = self.RECORD_SCHEMA.fields_dict["queueUrl"].default
18007
+ self.queueRegion = self.RECORD_SCHEMA.fields_dict["queueRegion"].default
17897
18008
  self.isEmbedded = self.RECORD_SCHEMA.fields_dict["isEmbedded"].default
17898
18009
  self.state = self.RECORD_SCHEMA.fields_dict["state"].default
17899
18010
 
@@ -17938,6 +18049,16 @@ class RemoteExecutorPoolInfoClass(_Aspect):
17938
18049
  self._inner_dict['queueUrl'] = value
17939
18050
 
17940
18051
 
18052
+ @property
18053
+ def queueRegion(self) -> Union[None, str]:
18054
+ """The region of the sqs queue for this pool for backwards compatibility."""
18055
+ return self._inner_dict.get('queueRegion') # type: ignore
18056
+
18057
+ @queueRegion.setter
18058
+ def queueRegion(self, value: Union[None, str]) -> None:
18059
+ self._inner_dict['queueRegion'] = value
18060
+
18061
+
17941
18062
  @property
17942
18063
  def isEmbedded(self) -> Union[None, bool]:
17943
18064
  """Only set true if this is the pool embedded within the DataHub Cloud deployment"""
@@ -21491,7 +21612,7 @@ class AnomalyKeyClass(_Aspect):
21491
21612
 
21492
21613
 
21493
21614
  ASPECT_NAME = 'anomalyKey'
21494
- ASPECT_INFO = {'keyForEntity': 'anomaly', 'entityCategory': 'core', 'entityAspects': ['anomalyInfo'], 'entityDoc': 'Anomalies represent issues that were detected based on inferred or auto-generated assertions.'}
21615
+ ASPECT_INFO = {}
21495
21616
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.AnomalyKey")
21496
21617
 
21497
21618
  def __init__(self,
@@ -21649,7 +21770,7 @@ class CorpGroupKeyClass(_Aspect):
21649
21770
 
21650
21771
 
21651
21772
  ASPECT_NAME = 'corpGroupKey'
21652
- ASPECT_INFO = {'keyForEntity': 'corpGroup', 'entityCategory': '_unset_', 'entityAspects': ['corpGroupInfo', 'corpGroupEditableInfo', 'globalTags', 'ownership', 'status', 'origin', 'roleMembership', 'structuredProperties', 'forms', 'testResults', 'corpGroupSettings', 'share'], 'entityDoc': 'CorpGroup represents an identity of a group of users in the enterprise.'}
21773
+ ASPECT_INFO = {'keyForEntity': 'corpGroup', 'entityCategory': '_unset_', 'entityAspects': ['corpGroupInfo', 'corpGroupEditableInfo', 'globalTags', 'ownership', 'status', 'origin', 'roleMembership', 'structuredProperties', 'forms', 'testResults', 'subTypes', 'corpGroupSettings', 'share'], 'entityDoc': 'CorpGroup represents an identity of a group of users in the enterprise.'}
21653
21774
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.CorpGroupKey")
21654
21775
 
21655
21776
  def __init__(self,
@@ -21678,7 +21799,7 @@ class CorpUserKeyClass(_Aspect):
21678
21799
 
21679
21800
 
21680
21801
  ASPECT_NAME = 'corpUserKey'
21681
- ASPECT_INFO = {'keyForEntity': 'corpuser', 'entityCategory': '_unset_', 'entityAspects': ['corpUserInfo', 'corpUserEditableInfo', 'corpUserStatus', 'groupMembership', 'globalTags', 'status', 'corpUserCredentials', 'nativeGroupMembership', 'corpUserSettings', 'origin', 'roleMembership', 'structuredProperties', 'forms', 'testResults', 'share', 'slackUserInfo'], 'entityDoc': 'CorpUser represents an identity of a person (or an account) in the enterprise.'}
21802
+ ASPECT_INFO = {'keyForEntity': 'corpuser', 'entityCategory': '_unset_', 'entityAspects': ['corpUserInfo', 'corpUserEditableInfo', 'corpUserStatus', 'groupMembership', 'globalTags', 'status', 'corpUserCredentials', 'nativeGroupMembership', 'corpUserSettings', 'origin', 'roleMembership', 'structuredProperties', 'forms', 'testResults', 'subTypes', 'slackUserInfo', 'share'], 'entityDoc': 'CorpUser represents an identity of a person (or an account) in the enterprise.'}
21682
21803
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.CorpUserKey")
21683
21804
 
21684
21805
  def __init__(self,
@@ -22365,7 +22486,7 @@ class DataProcessKeyClass(_Aspect):
22365
22486
 
22366
22487
 
22367
22488
  ASPECT_NAME = 'dataProcessKey'
22368
- ASPECT_INFO = {'keyForEntity': 'dataProcess', 'entityCategory': '_unset_', 'entityAspects': ['dataProcessInfo', 'ownership', 'status', 'testResults']}
22489
+ ASPECT_INFO = {'keyForEntity': 'dataProcess', 'entityCategory': '_unset_', 'entityAspects': ['dataProcessInfo', 'ownership', 'status', 'testResults', 'subTypes']}
22369
22490
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataProcessKey")
22370
22491
 
22371
22492
  def __init__(self,
@@ -22653,7 +22774,7 @@ class GlossaryNodeKeyClass(_Aspect):
22653
22774
 
22654
22775
 
22655
22776
  ASPECT_NAME = 'glossaryNodeKey'
22656
- ASPECT_INFO = {'keyForEntity': 'glossaryNode', 'entityCategory': 'core', 'entityAspects': ['glossaryNodeInfo', 'institutionalMemory', 'ownership', 'status', 'structuredProperties', 'forms', 'testResults', 'displayProperties', 'share', 'origin']}
22777
+ ASPECT_INFO = {'keyForEntity': 'glossaryNode', 'entityCategory': 'core', 'entityAspects': ['glossaryNodeInfo', 'institutionalMemory', 'ownership', 'status', 'structuredProperties', 'forms', 'testResults', 'subTypes', 'displayProperties', 'share', 'origin']}
22657
22778
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.GlossaryNodeKey")
22658
22779
 
22659
22780
  def __init__(self,
@@ -22682,7 +22803,7 @@ class GlossaryTermKeyClass(_Aspect):
22682
22803
 
22683
22804
 
22684
22805
  ASPECT_NAME = 'glossaryTermKey'
22685
- ASPECT_INFO = {'keyForEntity': 'glossaryTerm', 'entityCategory': 'core', 'entityAspects': ['glossaryTermInfo', 'glossaryRelatedTerms', 'institutionalMemory', 'schemaMetadata', 'ownership', 'deprecation', 'domains', 'status', 'browsePaths', 'structuredProperties', 'forms', 'testResults', 'share', 'origin']}
22806
+ ASPECT_INFO = {'keyForEntity': 'glossaryTerm', 'entityCategory': 'core', 'entityAspects': ['glossaryTermInfo', 'glossaryRelatedTerms', 'institutionalMemory', 'schemaMetadata', 'ownership', 'deprecation', 'domains', 'status', 'browsePaths', 'structuredProperties', 'forms', 'testResults', 'subTypes', 'share', 'origin']}
22686
22807
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.GlossaryTermKey")
22687
22808
 
22688
22809
  def __init__(self,
@@ -22798,7 +22919,7 @@ class MLFeatureKeyClass(_Aspect):
22798
22919
 
22799
22920
 
22800
22921
  ASPECT_NAME = 'mlFeatureKey'
22801
- ASPECT_INFO = {'keyForEntity': 'mlFeature', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureProperties', 'domains', 'mlFeatureProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
22922
+ ASPECT_INFO = {'keyForEntity': 'mlFeature', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureProperties', 'domains', 'mlFeatureProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'subTypes', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
22802
22923
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLFeatureKey")
22803
22924
 
22804
22925
  def __init__(self,
@@ -22840,7 +22961,7 @@ class MLFeatureTableKeyClass(_Aspect):
22840
22961
 
22841
22962
 
22842
22963
  ASPECT_NAME = 'mlFeatureTableKey'
22843
- ASPECT_INFO = {'keyForEntity': 'mlFeatureTable', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureTableProperties', 'domains', 'mlFeatureTableProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
22964
+ ASPECT_INFO = {'keyForEntity': 'mlFeatureTable', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlFeatureTableProperties', 'domains', 'mlFeatureTableProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'subTypes', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
22844
22965
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLFeatureTableKey")
22845
22966
 
22846
22967
  def __init__(self,
@@ -22882,7 +23003,7 @@ class MLModelDeploymentKeyClass(_Aspect):
22882
23003
 
22883
23004
 
22884
23005
  ASPECT_NAME = 'mlModelDeploymentKey'
22885
- ASPECT_INFO = {'keyForEntity': 'mlModelDeployment', 'entityCategory': 'core', 'entityAspects': ['mlModelDeploymentProperties', 'ownership', 'status', 'deprecation', 'globalTags', 'dataPlatformInstance', 'testResults']}
23006
+ ASPECT_INFO = {'keyForEntity': 'mlModelDeployment', 'entityCategory': 'core', 'entityAspects': ['mlModelDeploymentProperties', 'ownership', 'status', 'deprecation', 'globalTags', 'dataPlatformInstance', 'testResults', 'container']}
22886
23007
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLModelDeploymentKey")
22887
23008
 
22888
23009
  def __init__(self,
@@ -22937,7 +23058,7 @@ class MLModelGroupKeyClass(_Aspect):
22937
23058
 
22938
23059
 
22939
23060
  ASPECT_NAME = 'mlModelGroupKey'
22940
- ASPECT_INFO = {'keyForEntity': 'mlModelGroup', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelGroupProperties', 'domains', 'mlModelGroupProperties', 'ownership', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
23061
+ ASPECT_INFO = {'keyForEntity': 'mlModelGroup', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelGroupProperties', 'domains', 'mlModelGroupProperties', 'ownership', 'status', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'subTypes', 'container', 'institutionalMemory', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation']}
22941
23062
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLModelGroupKey")
22942
23063
 
22943
23064
  def __init__(self,
@@ -22992,7 +23113,7 @@ class MLModelKeyClass(_Aspect):
22992
23113
 
22993
23114
 
22994
23115
  ASPECT_NAME = 'mlModelKey'
22995
- ASPECT_INFO = {'keyForEntity': 'mlModel', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelProperties', 'domains', 'ownership', 'mlModelProperties', 'intendedUse', 'mlModelFactorPrompts', 'mlModelMetrics', 'mlModelEvaluationData', 'mlModelTrainingData', 'mlModelQuantitativeAnalyses', 'mlModelEthicalConsiderations', 'mlModelCaveatsAndRecommendations', 'institutionalMemory', 'sourceCode', 'status', 'cost', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'versionProperties', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
23116
+ ASPECT_INFO = {'keyForEntity': 'mlModel', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlModelProperties', 'domains', 'ownership', 'mlModelProperties', 'intendedUse', 'mlModelFactorPrompts', 'mlModelMetrics', 'mlModelEvaluationData', 'mlModelTrainingData', 'mlModelQuantitativeAnalyses', 'mlModelEthicalConsiderations', 'mlModelCaveatsAndRecommendations', 'institutionalMemory', 'sourceCode', 'status', 'cost', 'deprecation', 'browsePaths', 'globalTags', 'dataPlatformInstance', 'browsePathsV2', 'structuredProperties', 'forms', 'testResults', 'versionProperties', 'subTypes', 'container', 'proposals', 'share', 'origin', 'lineageFeatures', 'documentation', 'incidentsSummary']}
22996
23117
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLModelKey")
22997
23118
 
22998
23119
  def __init__(self,
@@ -23047,7 +23168,7 @@ class MLPrimaryKeyKeyClass(_Aspect):
23047
23168
 
23048
23169
 
23049
23170
  ASPECT_NAME = 'mlPrimaryKeyKey'
23050
- ASPECT_INFO = {'keyForEntity': 'mlPrimaryKey', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlPrimaryKeyProperties', 'domains', 'mlPrimaryKeyProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'globalTags', 'dataPlatformInstance', 'structuredProperties', 'forms', 'testResults', 'proposals', 'share', 'origin', 'lineageFeatures']}
23171
+ ASPECT_INFO = {'keyForEntity': 'mlPrimaryKey', 'entityCategory': 'core', 'entityAspects': ['glossaryTerms', 'editableMlPrimaryKeyProperties', 'domains', 'mlPrimaryKeyProperties', 'ownership', 'institutionalMemory', 'status', 'deprecation', 'globalTags', 'dataPlatformInstance', 'structuredProperties', 'forms', 'testResults', 'subTypes', 'proposals', 'share', 'origin', 'lineageFeatures']}
23051
23172
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MLPrimaryKeyKey")
23052
23173
 
23053
23174
  def __init__(self,
@@ -23089,7 +23210,7 @@ class MonitorKeyClass(_Aspect):
23089
23210
 
23090
23211
 
23091
23212
  ASPECT_NAME = 'monitorKey'
23092
- ASPECT_INFO = {'keyForEntity': 'monitor', 'entityCategory': 'core', 'entityAspects': ['monitorInfo', 'monitorTimeseriesState']}
23213
+ ASPECT_INFO = {'keyForEntity': 'monitor', 'entityCategory': 'core', 'entityAspects': ['monitorInfo', 'monitorTimeseriesState', 'monitorAnomalyEvent']}
23093
23214
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MonitorKey")
23094
23215
 
23095
23216
  def __init__(self,
@@ -23126,6 +23247,35 @@ class MonitorKeyClass(_Aspect):
23126
23247
  self._inner_dict['id'] = value
23127
23248
 
23128
23249
 
23250
+ class MonitorSuiteKeyClass(_Aspect):
23251
+ """Key for a monitor suite."""
23252
+
23253
+
23254
+ ASPECT_NAME = 'monitorSuiteKey'
23255
+ ASPECT_INFO = {'keyForEntity': 'monitorSuite', 'entityCategory': 'core', 'entityAspects': ['monitorSuiteInfo']}
23256
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.MonitorSuiteKey")
23257
+
23258
+ def __init__(self,
23259
+ id: str,
23260
+ ):
23261
+ super().__init__()
23262
+
23263
+ self.id = id
23264
+
23265
+ def _restore_defaults(self) -> None:
23266
+ self.id = str()
23267
+
23268
+
23269
+ @property
23270
+ def id(self) -> str:
23271
+ """Unique id for the monitor suite."""
23272
+ return self._inner_dict.get('id') # type: ignore
23273
+
23274
+ @id.setter
23275
+ def id(self, value: str) -> None:
23276
+ self._inner_dict['id'] = value
23277
+
23278
+
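A minimal usage sketch for the new MonitorSuiteKeyClass above, assuming the import path acryl_datahub_cloud.metadata.schema_classes that this file ships under; the id value is a made-up example.

from acryl_datahub_cloud.metadata.schema_classes import MonitorSuiteKeyClass

# Key aspect for the new 'monitorSuite' entity introduced in this release.
suite_key = MonitorSuiteKeyClass(id="weekly-freshness-suite")  # illustrative id only
assert suite_key.ASPECT_NAME == "monitorSuiteKey"
assert suite_key.ASPECT_INFO["keyForEntity"] == "monitorSuite"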
23129
23279
  class NotebookKeyClass(_Aspect):
23130
23280
  """Key for a Notebook"""
23131
23281
 
@@ -23203,7 +23353,7 @@ class PostKeyClass(_Aspect):
23203
23353
 
23204
23354
 
23205
23355
  ASPECT_NAME = 'postKey'
23206
- ASPECT_INFO = {'keyForEntity': 'post', 'entityCategory': 'core', 'entityAspects': ['postInfo', 'status']}
23356
+ ASPECT_INFO = {'keyForEntity': 'post', 'entityCategory': 'core', 'entityAspects': ['postInfo', 'subTypes', 'status']}
23207
23357
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.PostKey")
23208
23358
 
23209
23359
  def __init__(self,
@@ -23420,7 +23570,7 @@ class SchemaFieldKeyClass(_Aspect):
23420
23570
 
23421
23571
 
23422
23572
  ASPECT_NAME = 'schemaFieldKey'
23423
- ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults', 'deprecation', 'schemaFieldProfile', 'lineageFeatures']}
23573
+ ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults', 'deprecation', 'subTypes', 'schemaFieldProfile', 'lineageFeatures']}
23424
23574
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.SchemaFieldKey")
23425
23575
 
23426
23576
  def __init__(self,
@@ -26887,6 +27037,7 @@ class MLModelGroupPropertiesClass(_Aspect):
26887
27037
  customProperties: Optional[Dict[str, str]]=None,
26888
27038
  trainingJobs: Union[None, List[str]]=None,
26889
27039
  downstreamJobs: Union[None, List[str]]=None,
27040
+ externalUrl: Union[None, str]=None,
26890
27041
  name: Union[None, str]=None,
26891
27042
  description: Union[None, str]=None,
26892
27043
  createdAt: Union[None, int]=None,
@@ -26903,6 +27054,7 @@ class MLModelGroupPropertiesClass(_Aspect):
26903
27054
  self.customProperties = customProperties
26904
27055
  self.trainingJobs = trainingJobs
26905
27056
  self.downstreamJobs = downstreamJobs
27057
+ self.externalUrl = externalUrl
26906
27058
  self.name = name
26907
27059
  self.description = description
26908
27060
  self.createdAt = createdAt
@@ -26914,6 +27066,7 @@ class MLModelGroupPropertiesClass(_Aspect):
26914
27066
  self.customProperties = dict()
26915
27067
  self.trainingJobs = self.RECORD_SCHEMA.fields_dict["trainingJobs"].default
26916
27068
  self.downstreamJobs = self.RECORD_SCHEMA.fields_dict["downstreamJobs"].default
27069
+ self.externalUrl = self.RECORD_SCHEMA.fields_dict["externalUrl"].default
26917
27070
  self.name = self.RECORD_SCHEMA.fields_dict["name"].default
26918
27071
  self.description = self.RECORD_SCHEMA.fields_dict["description"].default
26919
27072
  self.createdAt = self.RECORD_SCHEMA.fields_dict["createdAt"].default
@@ -26952,6 +27105,16 @@ class MLModelGroupPropertiesClass(_Aspect):
26952
27105
  self._inner_dict['downstreamJobs'] = value
26953
27106
 
26954
27107
 
27108
+ @property
27109
+ def externalUrl(self) -> Union[None, str]:
27110
+ """URL where the reference exist"""
27111
+ return self._inner_dict.get('externalUrl') # type: ignore
27112
+
27113
+ @externalUrl.setter
27114
+ def externalUrl(self, value: Union[None, str]) -> None:
27115
+ self._inner_dict['externalUrl'] = value
27116
+
27117
+
26955
27118
  @property
26956
27119
  def name(self) -> Union[None, str]:
26957
27120
  """Display name of the MLModelGroup"""
@@ -27662,15 +27825,18 @@ class AssertionEvaluationContextClass(DictWrapper):
27662
27825
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.monitor.AssertionEvaluationContext")
27663
27826
  def __init__(self,
27664
27827
  embeddedAssertions: Union[None, List["EmbeddedAssertionClass"]]=None,
27828
+ inferenceDetails: Union[None, "AssertionInferenceDetailsClass"]=None,
27665
27829
  stdDev: Union[None, float]=None,
27666
27830
  ):
27667
27831
  super().__init__()
27668
27832
 
27669
27833
  self.embeddedAssertions = embeddedAssertions
27834
+ self.inferenceDetails = inferenceDetails
27670
27835
  self.stdDev = stdDev
27671
27836
 
27672
27837
  def _restore_defaults(self) -> None:
27673
27838
  self.embeddedAssertions = self.RECORD_SCHEMA.fields_dict["embeddedAssertions"].default
27839
+ self.inferenceDetails = self.RECORD_SCHEMA.fields_dict["inferenceDetails"].default
27674
27840
  self.stdDev = self.RECORD_SCHEMA.fields_dict["stdDev"].default
27675
27841
 
27676
27842
 
@@ -27686,6 +27852,16 @@ class AssertionEvaluationContextClass(DictWrapper):
27686
27852
  self._inner_dict['embeddedAssertions'] = value
27687
27853
 
27688
27854
 
27855
+ @property
27856
+ def inferenceDetails(self) -> Union[None, "AssertionInferenceDetailsClass"]:
27857
+ """Details about the assertion inference."""
27858
+ return self._inner_dict.get('inferenceDetails') # type: ignore
27859
+
27860
+ @inferenceDetails.setter
27861
+ def inferenceDetails(self, value: Union[None, "AssertionInferenceDetailsClass"]) -> None:
27862
+ self._inner_dict['inferenceDetails'] = value
27863
+
27864
+
27689
27865
  @property
27690
27866
  def stdDev(self) -> Union[None, float]:
27691
27867
  """The std deviation of the metric values used for training.
@@ -27919,10 +28095,12 @@ class AssertionMonitorSettingsClass(DictWrapper):
27919
28095
 
27920
28096
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.monitor.AssertionMonitorSettings")
27921
28097
  def __init__(self,
28098
+ adjustmentSettings: Union[None, "AssertionAdjustmentSettingsClass"]=None,
27922
28099
  capabilities: Optional[List[Union[str, "AssertionMonitorCapabilityClass"]]]=None,
27923
28100
  ):
27924
28101
  super().__init__()
27925
28102
 
28103
+ self.adjustmentSettings = adjustmentSettings
27926
28104
  if capabilities is None:
27927
28105
  # default: ['ASSERTION_EVALUATION']
27928
28106
  self.capabilities = list()
@@ -27930,9 +28108,20 @@ class AssertionMonitorSettingsClass(DictWrapper):
27930
28108
  self.capabilities = capabilities
27931
28109
 
27932
28110
  def _restore_defaults(self) -> None:
28111
+ self.adjustmentSettings = self.RECORD_SCHEMA.fields_dict["adjustmentSettings"].default
27933
28112
  self.capabilities = list()
27934
28113
 
27935
28114
 
28115
+ @property
28116
+ def adjustmentSettings(self) -> Union[None, "AssertionAdjustmentSettingsClass"]:
28117
+ """In adjustment settings, the settings"""
28118
+ return self._inner_dict.get('adjustmentSettings') # type: ignore
28119
+
28120
+ @adjustmentSettings.setter
28121
+ def adjustmentSettings(self, value: Union[None, "AssertionAdjustmentSettingsClass"]) -> None:
28122
+ self._inner_dict['adjustmentSettings'] = value
28123
+
28124
+
27936
28125
  @property
27937
28126
  def capabilities(self) -> List[Union[str, "AssertionMonitorCapabilityClass"]]:
27938
28127
  """Capabilities that are currently enabled for the assertion monitor."""
@@ -28242,15 +28431,18 @@ class EmbeddedAssertionClass(DictWrapper):
28242
28431
  def __init__(self,
28243
28432
  assertion: Union[None, "AssertionInfoClass"]=None,
28244
28433
  evaluationTimeWindow: Union[None, "TimeWindowClass"]=None,
28434
+ context: Union[None, Dict[str, str]]=None,
28245
28435
  ):
28246
28436
  super().__init__()
28247
28437
 
28248
28438
  self.assertion = assertion
28249
28439
  self.evaluationTimeWindow = evaluationTimeWindow
28440
+ self.context = context
28250
28441
 
28251
28442
  def _restore_defaults(self) -> None:
28252
28443
  self.assertion = self.RECORD_SCHEMA.fields_dict["assertion"].default
28253
28444
  self.evaluationTimeWindow = self.RECORD_SCHEMA.fields_dict["evaluationTimeWindow"].default
28445
+ self.context = self.RECORD_SCHEMA.fields_dict["context"].default
28254
28446
 
28255
28447
 
28256
28448
  @property
@@ -28273,6 +28465,62 @@ class EmbeddedAssertionClass(DictWrapper):
28273
28465
  self._inner_dict['evaluationTimeWindow'] = value
28274
28466
 
28275
28467
 
28468
+ @property
28469
+ def context(self) -> Union[None, Dict[str, str]]:
28470
+ """Context about the embedded assertion prediction"""
28471
+ return self._inner_dict.get('context') # type: ignore
28472
+
28473
+ @context.setter
28474
+ def context(self, value: Union[None, Dict[str, str]]) -> None:
28475
+ self._inner_dict['context'] = value
28476
+
28477
+
28478
+ class MonitorErrorClass(DictWrapper):
28479
+ """Error details for the monitor"""
28480
+
28481
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.monitor.MonitorError")
28482
+ def __init__(self,
28483
+ type: Union[str, "MonitorErrorTypeClass"],
28484
+ message: Union[None, str]=None,
28485
+ ):
28486
+ super().__init__()
28487
+
28488
+ self.type = type
28489
+ self.message = message
28490
+
28491
+ def _restore_defaults(self) -> None:
28492
+ self.type = MonitorErrorTypeClass.UNKNOWN
28493
+ self.message = self.RECORD_SCHEMA.fields_dict["message"].default
28494
+
28495
+
28496
+ @property
28497
+ def type(self) -> Union[str, "MonitorErrorTypeClass"]:
28498
+ """The error for the monitor"""
28499
+ return self._inner_dict.get('type') # type: ignore
28500
+
28501
+ @type.setter
28502
+ def type(self, value: Union[str, "MonitorErrorTypeClass"]) -> None:
28503
+ self._inner_dict['type'] = value
28504
+
28505
+
28506
+ @property
28507
+ def message(self) -> Union[None, str]:
28508
+ """Optional custom message for the error"""
28509
+ return self._inner_dict.get('message') # type: ignore
28510
+
28511
+ @message.setter
28512
+ def message(self, value: Union[None, str]) -> None:
28513
+ self._inner_dict['message'] = value
28514
+
28515
+
28516
+ class MonitorErrorTypeClass(object):
28517
+ # No docs available.
28518
+
28519
+ UNKNOWN = "UNKNOWN"
28520
+ """An unknown error occurred"""
28521
+
28522
+
28523
+
28276
28524
  class MonitorInfoClass(_Aspect):
28277
28525
  """Information about an asset monitor."""
28278
28526
 
@@ -28386,19 +28634,42 @@ class MonitorModeClass(object):
28386
28634
 
28387
28635
 
28388
28636
 
28637
+ class MonitorStateClass(object):
28638
+ # No docs available.
28639
+
28640
+ TRAINING = "TRAINING"
28641
+ """The monitor is in the training stage."""
28642
+
28643
+ EVALUATION = "EVALUATION"
28644
+ """The monitor is in the evaluation stage."""
28645
+
28646
+ ERROR = "ERROR"
28647
+ """The monitor is in the evaluation stage."""
28648
+
28649
+
28650
+
28389
28651
  class MonitorStatusClass(DictWrapper):
28390
28652
  """The status of an asset monitor"""
28391
28653
 
28392
28654
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.monitor.MonitorStatus")
28393
28655
  def __init__(self,
28394
28656
  mode: Union[str, "MonitorModeClass"],
28657
+ state: Union[None, Union[str, "MonitorStateClass"]]=None,
28658
+ error: Union[None, "MonitorErrorClass"]=None,
28659
+ reviewedAt: Union[None, int]=None,
28395
28660
  ):
28396
28661
  super().__init__()
28397
28662
 
28398
28663
  self.mode = mode
28664
+ self.state = state
28665
+ self.error = error
28666
+ self.reviewedAt = reviewedAt
28399
28667
 
28400
28668
  def _restore_defaults(self) -> None:
28401
28669
  self.mode = MonitorModeClass.ACTIVE
28670
+ self.state = self.RECORD_SCHEMA.fields_dict["state"].default
28671
+ self.error = self.RECORD_SCHEMA.fields_dict["error"].default
28672
+ self.reviewedAt = self.RECORD_SCHEMA.fields_dict["reviewedAt"].default
28402
28673
 
28403
28674
 
28404
28675
  @property
@@ -28411,6 +28682,297 @@ class MonitorStatusClass(DictWrapper):
28411
28682
  self._inner_dict['mode'] = value
28412
28683
 
28413
28684
 
28685
+ @property
28686
+ def state(self) -> Union[None, Union[str, "MonitorStateClass"]]:
28687
+ """The start time of the monitor"""
28688
+ return self._inner_dict.get('state') # type: ignore
28689
+
28690
+ @state.setter
28691
+ def state(self, value: Union[None, Union[str, "MonitorStateClass"]]) -> None:
28692
+ self._inner_dict['state'] = value
28693
+
28694
+
28695
+ @property
28696
+ def error(self) -> Union[None, "MonitorErrorClass"]:
28697
+ """The last time the monitor was started"""
28698
+ return self._inner_dict.get('error') # type: ignore
28699
+
28700
+ @error.setter
28701
+ def error(self, value: Union[None, "MonitorErrorClass"]) -> None:
28702
+ self._inner_dict['error'] = value
28703
+
28704
+
28705
+ @property
28706
+ def reviewedAt(self) -> Union[None, int]:
28707
+ """The last time the monitor was started"""
28708
+ return self._inner_dict.get('reviewedAt') # type: ignore
28709
+
28710
+ @reviewedAt.setter
28711
+ def reviewedAt(self, value: Union[None, int]) -> None:
28712
+ self._inner_dict['reviewedAt'] = value
28713
+
28714
+
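MonitorStatus now records an optional state, error, and reviewedAt next to the existing mode. A minimal sketch of filling the new fields, assuming the same import path as above; the message and timestamp are placeholders.

from acryl_datahub_cloud.metadata.schema_classes import (
    MonitorErrorClass,
    MonitorErrorTypeClass,
    MonitorModeClass,
    MonitorStateClass,
    MonitorStatusClass,
)

status = MonitorStatusClass(
    mode=MonitorModeClass.ACTIVE,
    state=MonitorStateClass.ERROR,
    error=MonitorErrorClass(
        type=MonitorErrorTypeClass.UNKNOWN,
        message="evaluation failed; see executor logs",  # placeholder message
    ),
    reviewedAt=1735689600000,  # placeholder epoch millis
)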
28715
+ class MonitorSuiteAssertionSettingsClass(DictWrapper):
28716
+ """Settings for assertion monitors within a monitor suite."""
28717
+
28718
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.monitor.MonitorSuiteAssertionSettings")
28719
+ def __init__(self,
28720
+ schedule: Union[None, "FixedIntervalScheduleClass"]=None,
28721
+ trainingDataLookbackWindowDays: Optional[int]=None,
28722
+ action: Union[None, "AssertionActionClass"]=None,
28723
+ exclusionWindows: Union[None, List["AssertionExclusionWindowClass"]]=None,
28724
+ ):
28725
+ super().__init__()
28726
+
28727
+ self.schedule = schedule
28728
+ if trainingDataLookbackWindowDays is None:
28729
+ # default: 60
28730
+ self.trainingDataLookbackWindowDays = self.RECORD_SCHEMA.fields_dict["trainingDataLookbackWindowDays"].default
28731
+ else:
28732
+ self.trainingDataLookbackWindowDays = trainingDataLookbackWindowDays
28733
+ self.action = action
28734
+ self.exclusionWindows = exclusionWindows
28735
+
28736
+ def _restore_defaults(self) -> None:
28737
+ self.schedule = self.RECORD_SCHEMA.fields_dict["schedule"].default
28738
+ self.trainingDataLookbackWindowDays = self.RECORD_SCHEMA.fields_dict["trainingDataLookbackWindowDays"].default
28739
+ self.action = self.RECORD_SCHEMA.fields_dict["action"].default
28740
+ self.exclusionWindows = self.RECORD_SCHEMA.fields_dict["exclusionWindows"].default
28741
+
28742
+
28743
+ @property
28744
+ def schedule(self) -> Union[None, "FixedIntervalScheduleClass"]:
28745
+ """The schedule for the assertion monitors within this group.
28746
+ Not applicable for freshness anomaly monitors."""
28747
+ return self._inner_dict.get('schedule') # type: ignore
28748
+
28749
+ @schedule.setter
28750
+ def schedule(self, value: Union[None, "FixedIntervalScheduleClass"]) -> None:
28751
+ self._inner_dict['schedule'] = value
28752
+
28753
+
28754
+ @property
28755
+ def trainingDataLookbackWindowDays(self) -> int:
28756
+ """The lookback window configuration for retrieving training data points.
28757
+ Defines how far back in time to look when gathering data for training.
28758
+ Defaults to 60 days if not specified."""
28759
+ return self._inner_dict.get('trainingDataLookbackWindowDays') # type: ignore
28760
+
28761
+ @trainingDataLookbackWindowDays.setter
28762
+ def trainingDataLookbackWindowDays(self, value: int) -> None:
28763
+ self._inner_dict['trainingDataLookbackWindowDays'] = value
28764
+
28765
+
28766
+ @property
28767
+ def action(self) -> Union[None, "AssertionActionClass"]:
28768
+ """The action to take when an assertion evaluates.
28769
+ I.e., raise an incident if the assertion fails."""
28770
+ return self._inner_dict.get('action') # type: ignore
28771
+
28772
+ @action.setter
28773
+ def action(self, value: Union[None, "AssertionActionClass"]) -> None:
28774
+ self._inner_dict['action'] = value
28775
+
28776
+
28777
+ @property
28778
+ def exclusionWindows(self) -> Union[None, List["AssertionExclusionWindowClass"]]:
28779
+ """The exclusion windows for the assertion monitors within this group."""
28780
+ return self._inner_dict.get('exclusionWindows') # type: ignore
28781
+
28782
+ @exclusionWindows.setter
28783
+ def exclusionWindows(self, value: Union[None, List["AssertionExclusionWindowClass"]]) -> None:
28784
+ self._inner_dict['exclusionWindows'] = value
28785
+
28786
+
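trainingDataLookbackWindowDays falls back to 60 days when omitted; passing a value overrides it. A minimal sketch, leaving schedule, action, and exclusionWindows unset because their record types are defined elsewhere in this module.

from acryl_datahub_cloud.metadata.schema_classes import MonitorSuiteAssertionSettingsClass

default_settings = MonitorSuiteAssertionSettingsClass()  # lookback window defaults to 60 days
short_lookback = MonitorSuiteAssertionSettingsClass(trainingDataLookbackWindowDays=30)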
28787
+ class MonitorSuiteAssignmentSpecClass(DictWrapper):
28788
+ """Information about entities that should be monitored by a monitor suite."""
28789
+
28790
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.monitor.MonitorSuiteAssignmentSpec")
28791
+ def __init__(self,
28792
+ filter: "FilterClass",
28793
+ json: Union[None, str]=None,
28794
+ ):
28795
+ super().__init__()
28796
+
28797
+ self.filter = filter
28798
+ self.json = json
28799
+
28800
+ def _restore_defaults(self) -> None:
28801
+ self.filter = FilterClass._construct_with_defaults()
28802
+ self.json = self.RECORD_SCHEMA.fields_dict["json"].default
28803
+
28804
+
28805
+ @property
28806
+ def filter(self) -> "FilterClass":
28807
+ """The filter applied when assigning this form to entities. Entities that match this filter
28808
+ will be monitored by this suite. Right now this filter only supports filtering by
28809
+ platform, entity type, container, term, tag, domain, and urns through the UI."""
28810
+ return self._inner_dict.get('filter') # type: ignore
28811
+
28812
+ @filter.setter
28813
+ def filter(self, value: "FilterClass") -> None:
28814
+ self._inner_dict['filter'] = value
28815
+
28816
+
28817
+ @property
28818
+ def json(self) -> Union[None, str]:
28819
+ """The stringified json representing the logical predicate built in the UI to select assets.
28820
+ This predicate is turned into orFilters to send through graphql since graphql doesn't support
28821
+ arbitrary nesting. This string is used to restore the UI for this logical predicate."""
28822
+ return self._inner_dict.get('json') # type: ignore
28823
+
28824
+ @json.setter
28825
+ def json(self, value: Union[None, str]) -> None:
28826
+ self._inner_dict['json'] = value
28827
+
28828
+
28829
+ class MonitorSuiteInfoClass(_Aspect):
28830
+ """Information about an asset monitor."""
28831
+
28832
+
28833
+ ASPECT_NAME = 'monitorSuiteInfo'
28834
+ ASPECT_INFO = {}
28835
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.monitor.MonitorSuiteInfo")
28836
+
28837
+ def __init__(self,
28838
+ type: Union[str, "MonitorSuiteTypeClass"],
28839
+ assignment: "MonitorSuiteAssignmentSpecClass",
28840
+ assertionSettings: Union[None, "MonitorSuiteAssertionSettingsClass"]=None,
28841
+ notificationSettings: Union[None, "MonitorSuiteNotificationSettingsClass"]=None,
28842
+ created: Union[None, "AuditStampClass"]=None,
28843
+ updated: Union[None, "AuditStampClass"]=None,
28844
+ ):
28845
+ super().__init__()
28846
+
28847
+ self.type = type
28848
+ self.assignment = assignment
28849
+ self.assertionSettings = assertionSettings
28850
+ self.notificationSettings = notificationSettings
28851
+ self.created = created
28852
+ self.updated = updated
28853
+
28854
+ def _restore_defaults(self) -> None:
28855
+ self.type = MonitorSuiteTypeClass.FRESHNESS
28856
+ self.assignment = MonitorSuiteAssignmentSpecClass._construct_with_defaults()
28857
+ self.assertionSettings = self.RECORD_SCHEMA.fields_dict["assertionSettings"].default
28858
+ self.notificationSettings = self.RECORD_SCHEMA.fields_dict["notificationSettings"].default
28859
+ self.created = self.RECORD_SCHEMA.fields_dict["created"].default
28860
+ self.updated = self.RECORD_SCHEMA.fields_dict["updated"].default
28861
+
28862
+
28863
+ @property
28864
+ def type(self) -> Union[str, "MonitorSuiteTypeClass"]:
28865
+ """Type of monitor suite."""
28866
+ return self._inner_dict.get('type') # type: ignore
28867
+
28868
+ @type.setter
28869
+ def type(self, value: Union[str, "MonitorSuiteTypeClass"]) -> None:
28870
+ self._inner_dict['type'] = value
28871
+
28872
+
28873
+ @property
28874
+ def assignment(self) -> "MonitorSuiteAssignmentSpecClass":
28875
+ """The assignment rule to define the entities that are monitored by this group."""
28876
+ return self._inner_dict.get('assignment') # type: ignore
28877
+
28878
+ @assignment.setter
28879
+ def assignment(self, value: "MonitorSuiteAssignmentSpecClass") -> None:
28880
+ self._inner_dict['assignment'] = value
28881
+
28882
+
28883
+ @property
28884
+ def assertionSettings(self) -> Union[None, "MonitorSuiteAssertionSettingsClass"]:
28885
+ """Settings for assertion monitors within this group."""
28886
+ return self._inner_dict.get('assertionSettings') # type: ignore
28887
+
28888
+ @assertionSettings.setter
28889
+ def assertionSettings(self, value: Union[None, "MonitorSuiteAssertionSettingsClass"]) -> None:
28890
+ self._inner_dict['assertionSettings'] = value
28891
+
28892
+
28893
+ @property
28894
+ def notificationSettings(self) -> Union[None, "MonitorSuiteNotificationSettingsClass"]:
28895
+ """Settings for notifications from monitors within this group."""
28896
+ return self._inner_dict.get('notificationSettings') # type: ignore
28897
+
28898
+ @notificationSettings.setter
28899
+ def notificationSettings(self, value: Union[None, "MonitorSuiteNotificationSettingsClass"]) -> None:
28900
+ self._inner_dict['notificationSettings'] = value
28901
+
28902
+
28903
+ @property
28904
+ def created(self) -> Union[None, "AuditStampClass"]:
28905
+ """Created Audit stamp"""
28906
+ return self._inner_dict.get('created') # type: ignore
28907
+
28908
+ @created.setter
28909
+ def created(self, value: Union[None, "AuditStampClass"]) -> None:
28910
+ self._inner_dict['created'] = value
28911
+
28912
+
28913
+ @property
28914
+ def updated(self) -> Union[None, "AuditStampClass"]:
28915
+ """Updated Audit stamp"""
28916
+ return self._inner_dict.get('updated') # type: ignore
28917
+
28918
+ @updated.setter
28919
+ def updated(self, value: Union[None, "AuditStampClass"]) -> None:
28920
+ self._inner_dict['updated'] = value
28921
+
28922
+
28923
+ class MonitorSuiteNotificationSettingsClass(DictWrapper):
28924
+ """Settings for notifications from monitors within a monitor suite."""
28925
+
28926
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.monitor.MonitorSuiteNotificationSettings")
28927
+ def __init__(self,
28928
+ entityChangeTypes: List["EntityChangeDetailsClass"],
28929
+ notificationConfig: "SubscriptionNotificationConfigClass",
28930
+ ):
28931
+ super().__init__()
28932
+
28933
+ self.entityChangeTypes = entityChangeTypes
28934
+ self.notificationConfig = notificationConfig
28935
+
28936
+ def _restore_defaults(self) -> None:
28937
+ self.entityChangeTypes = list()
28938
+ self.notificationConfig = SubscriptionNotificationConfigClass._construct_with_defaults()
28939
+
28940
+
28941
+ @property
28942
+ def entityChangeTypes(self) -> List["EntityChangeDetailsClass"]:
28943
+ """The change types that trigger a notification for the monitor suite.
28944
+ NOTE: only use Assertion and Incident change types for monitor suites."""
28945
+ return self._inner_dict.get('entityChangeTypes') # type: ignore
28946
+
28947
+ @entityChangeTypes.setter
28948
+ def entityChangeTypes(self, value: List["EntityChangeDetailsClass"]) -> None:
28949
+ self._inner_dict['entityChangeTypes'] = value
28950
+
28951
+
28952
+ @property
28953
+ def notificationConfig(self) -> "SubscriptionNotificationConfigClass":
28954
+ """The notification config for the monitor suite."""
28955
+ return self._inner_dict.get('notificationConfig') # type: ignore
28956
+
28957
+ @notificationConfig.setter
28958
+ def notificationConfig(self, value: "SubscriptionNotificationConfigClass") -> None:
28959
+ self._inner_dict['notificationConfig'] = value
28960
+
28961
+
28962
+ class MonitorSuiteTypeClass(object):
28963
+ # No docs available.
28964
+
28965
+ FRESHNESS = "FRESHNESS"
28966
+ """A monitor responsible for evaluating freshness anomalies."""
28967
+
28968
+ VOLUME = "VOLUME"
28969
+ """A monitor responsible for evaluating volume anomalies."""
28970
+
28971
+ DATA_SCHEMA = "DATA_SCHEMA"
28972
+ """A monitor responsible for evaluating schema changes."""
28973
+
28974
+
28975
+
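Putting the new monitor-suite records together: a minimal sketch that assembles a freshness suite, assuming the same import path as above. It uses the generated _construct_with_defaults helper (seen in the _restore_defaults methods in this file) as a stand-in for a real FilterClass-based assignment.

from acryl_datahub_cloud.metadata.schema_classes import (
    MonitorSuiteAssertionSettingsClass,
    MonitorSuiteAssignmentSpecClass,
    MonitorSuiteInfoClass,
    MonitorSuiteTypeClass,
)

# Placeholder assignment; a real one carries a FilterClass selecting the monitored assets.
assignment = MonitorSuiteAssignmentSpecClass._construct_with_defaults()

suite_info = MonitorSuiteInfoClass(
    type=MonitorSuiteTypeClass.FRESHNESS,
    assignment=assignment,
    assertionSettings=MonitorSuiteAssertionSettingsClass(trainingDataLookbackWindowDays=45),
)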
28414
28976
  class MonitorTimeseriesStateClass(_Aspect):
28415
28977
  """Stats required to evaluate continuous monitors."""
28416
28978
 
@@ -30681,12 +31243,18 @@ class QueryPropertiesClass(_Aspect):
30681
31243
  source: Union[str, "QuerySourceClass"],
30682
31244
  created: "AuditStampClass",
30683
31245
  lastModified: "AuditStampClass",
31246
+ customProperties: Optional[Dict[str, str]]=None,
30684
31247
  name: Union[None, str]=None,
30685
31248
  description: Union[None, str]=None,
30686
31249
  origin: Union[None, str]=None,
30687
31250
  ):
30688
31251
  super().__init__()
30689
31252
 
31253
+ if customProperties is None:
31254
+ # default: {}
31255
+ self.customProperties = dict()
31256
+ else:
31257
+ self.customProperties = customProperties
30690
31258
  self.statement = statement
30691
31259
  self.source = source
30692
31260
  self.name = name
@@ -30696,6 +31264,7 @@ class QueryPropertiesClass(_Aspect):
30696
31264
  self.origin = origin
30697
31265
 
30698
31266
  def _restore_defaults(self) -> None:
31267
+ self.customProperties = dict()
30699
31268
  self.statement = QueryStatementClass._construct_with_defaults()
30700
31269
  self.source = QuerySourceClass.MANUAL
30701
31270
  self.name = self.RECORD_SCHEMA.fields_dict["name"].default
@@ -30705,6 +31274,16 @@ class QueryPropertiesClass(_Aspect):
30705
31274
  self.origin = self.RECORD_SCHEMA.fields_dict["origin"].default
30706
31275
 
30707
31276
 
31277
+ @property
31278
+ def customProperties(self) -> Dict[str, str]:
31279
+ """Custom property bag."""
31280
+ return self._inner_dict.get('customProperties') # type: ignore
31281
+
31282
+ @customProperties.setter
31283
+ def customProperties(self, value: Dict[str, str]) -> None:
31284
+ self._inner_dict['customProperties'] = value
31285
+
31286
+
30708
31287
  @property
30709
31288
  def statement(self) -> "QueryStatementClass":
30710
31289
  """The Query Statement."""
@@ -32964,7 +33543,9 @@ class NotificationSettingClass(DictWrapper):
32964
33543
 
32965
33544
  @property
32966
33545
  def params(self) -> Union[None, Dict[str, str]]:
32967
- """Custom set of setting parameters."""
33546
+ """Custom set of setting parameters.
33547
+ Currently used to store sink enabled / disabled settings.
33548
+ E.g. slack.enabled = true, email.enabled = false"""
32968
33549
  return self._inner_dict.get('params') # type: ignore
32969
33550
 
32970
33551
  @params.setter
@@ -35484,6 +36065,44 @@ class TestStatusClass(DictWrapper):
35484
36065
  self._inner_dict['mode'] = value
35485
36066
 
35486
36067
 
36068
+ class AbsoluteTimeWindowClass(DictWrapper):
36069
+ # No docs available.
36070
+
36071
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.timeseries.AbsoluteTimeWindow")
36072
+ def __init__(self,
36073
+ startTimeMillis: int,
36074
+ endTimeMillis: int,
36075
+ ):
36076
+ super().__init__()
36077
+
36078
+ self.startTimeMillis = startTimeMillis
36079
+ self.endTimeMillis = endTimeMillis
36080
+
36081
+ def _restore_defaults(self) -> None:
36082
+ self.startTimeMillis = int()
36083
+ self.endTimeMillis = int()
36084
+
36085
+
36086
+ @property
36087
+ def startTimeMillis(self) -> int:
36088
+ """Start time as epoch at UTC."""
36089
+ return self._inner_dict.get('startTimeMillis') # type: ignore
36090
+
36091
+ @startTimeMillis.setter
36092
+ def startTimeMillis(self, value: int) -> None:
36093
+ self._inner_dict['startTimeMillis'] = value
36094
+
36095
+
36096
+ @property
36097
+ def endTimeMillis(self) -> int:
36098
+ """End time as epoch at UTC."""
36099
+ return self._inner_dict.get('endTimeMillis') # type: ignore
36100
+
36101
+ @endTimeMillis.setter
36102
+ def endTimeMillis(self, value: int) -> None:
36103
+ self._inner_dict['endTimeMillis'] = value
36104
+
36105
+
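AbsoluteTimeWindow is a plain start/end pair in epoch milliseconds at UTC. A minimal sketch with placeholder timestamps, assuming the same import path as above.

from acryl_datahub_cloud.metadata.schema_classes import AbsoluteTimeWindowClass

window = AbsoluteTimeWindowClass(
    startTimeMillis=1735689600000,  # placeholder start, epoch millis UTC
    endTimeMillis=1735776000000,    # placeholder end, epoch millis UTC
)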
35487
36106
  class CalendarIntervalClass(object):
35488
36107
  # No docs available.
35489
36108
 
@@ -35497,6 +36116,71 @@ class CalendarIntervalClass(object):
35497
36116
  YEAR = "YEAR"
35498
36117
 
35499
36118
 
36119
+ class DayOfWeekClass(object):
36120
+ # No docs available.
36121
+
36122
+ MONDAY = "MONDAY"
36123
+ TUESDAY = "TUESDAY"
36124
+ WEDNESDAY = "WEDNESDAY"
36125
+ THURSDAY = "THURSDAY"
36126
+ FRIDAY = "FRIDAY"
36127
+ SATURDAY = "SATURDAY"
36128
+ SUNDAY = "SUNDAY"
36129
+
36130
+
36131
+ class HolidayWindowClass(DictWrapper):
36132
+ # No docs available.
36133
+
36134
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.timeseries.HolidayWindow")
36135
+ def __init__(self,
36136
+ name: str,
36137
+ region: Union[None, str]=None,
36138
+ timezone: Union[None, str]=None,
36139
+ ):
36140
+ super().__init__()
36141
+
36142
+ self.name = name
36143
+ self.region = region
36144
+ self.timezone = timezone
36145
+
36146
+ def _restore_defaults(self) -> None:
36147
+ self.name = str()
36148
+ self.region = self.RECORD_SCHEMA.fields_dict["region"].default
36149
+ self.timezone = self.RECORD_SCHEMA.fields_dict["timezone"].default
36150
+
36151
+
36152
+ @property
36153
+ def name(self) -> str:
36154
+ """The name of the holiday."""
36155
+ return self._inner_dict.get('name') # type: ignore
36156
+
36157
+ @name.setter
36158
+ def name(self, value: str) -> None:
36159
+ self._inner_dict['name'] = value
36160
+
36161
+
36162
+ @property
36163
+ def region(self) -> Union[None, str]:
36164
+ """The region of the holiday.
36165
+ Example: 'US'"""
36166
+ return self._inner_dict.get('region') # type: ignore
36167
+
36168
+ @region.setter
36169
+ def region(self, value: Union[None, str]) -> None:
36170
+ self._inner_dict['region'] = value
36171
+
36172
+
36173
+ @property
36174
+ def timezone(self) -> Union[None, str]:
36175
+ """The timezone of the holiday.
36176
+ Example: 'America/New_York'"""
36177
+ return self._inner_dict.get('timezone') # type: ignore
36178
+
36179
+ @timezone.setter
36180
+ def timezone(self, value: Union[None, str]) -> None:
36181
+ self._inner_dict['timezone'] = value
36182
+
36183
+
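HolidayWindow names a holiday and optionally pins it to a region and timezone. A minimal sketch reusing the example values from the field docs above; the holiday name is a placeholder.

from acryl_datahub_cloud.metadata.schema_classes import HolidayWindowClass

holiday = HolidayWindowClass(
    name="Thanksgiving",          # placeholder holiday name
    region="US",                  # example value from the region doc
    timezone="America/New_York",  # example value from the timezone doc
)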
35500
36184
  class PartitionSpecClass(DictWrapper):
35501
36185
  """A reference to a specific partition in a dataset."""
35502
36186
 
@@ -35641,6 +36325,78 @@ class TimeWindowSizeClass(DictWrapper):
35641
36325
  self._inner_dict['multiple'] = value
35642
36326
 
35643
36327
 
36328
+ class WeeklyWindowClass(DictWrapper):
36329
+ """Represents a recurring time window that repeats weekly.
36330
+ Used to define exclusion periods for model training based on day of week and time of day."""
36331
+
36332
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.timeseries.WeeklyWindow")
36333
+ def __init__(self,
36334
+ daysOfWeek: List[Union[str, "DayOfWeekClass"]],
36335
+ startTime: Union[None, str]=None,
36336
+ endTime: Union[None, str]=None,
36337
+ timezone: Union[None, str]=None,
36338
+ ):
36339
+ super().__init__()
36340
+
36341
+ self.daysOfWeek = daysOfWeek
36342
+ self.startTime = startTime
36343
+ self.endTime = endTime
36344
+ self.timezone = timezone
36345
+
36346
+ def _restore_defaults(self) -> None:
36347
+ self.daysOfWeek = list()
36348
+ self.startTime = self.RECORD_SCHEMA.fields_dict["startTime"].default
36349
+ self.endTime = self.RECORD_SCHEMA.fields_dict["endTime"].default
36350
+ self.timezone = self.RECORD_SCHEMA.fields_dict["timezone"].default
36351
+
36352
+
36353
+ @property
36354
+ def daysOfWeek(self) -> List[Union[str, "DayOfWeekClass"]]:
36355
+ """Days of the week to include in the window.
36356
+ If not specified, all days are included."""
36357
+ return self._inner_dict.get('daysOfWeek') # type: ignore
36358
+
36359
+ @daysOfWeek.setter
36360
+ def daysOfWeek(self, value: List[Union[str, "DayOfWeekClass"]]) -> None:
36361
+ self._inner_dict['daysOfWeek'] = value
36362
+
36363
+
36364
+ @property
36365
+ def startTime(self) -> Union[None, str]:
36366
+ """Start time for the window on each specified day.
36367
+ Format: "HH:MM" in 24-hour format (e.g., "09:00" for 9am, "17:30" for 5:30pm).
36368
+ If not specified, defaults to start of day ("00:00")."""
36369
+ return self._inner_dict.get('startTime') # type: ignore
36370
+
36371
+ @startTime.setter
36372
+ def startTime(self, value: Union[None, str]) -> None:
36373
+ self._inner_dict['startTime'] = value
36374
+
36375
+
36376
+ @property
36377
+ def endTime(self) -> Union[None, str]:
36378
+ """End time for the window on each specified day.
36379
+ Format: "HH:MM" in 24-hour format (e.g., "17:00" for 5pm, "23:59" for end of day).
36380
+ If not specified, defaults to end of day ("23:59")."""
36381
+ return self._inner_dict.get('endTime') # type: ignore
36382
+
36383
+ @endTime.setter
36384
+ def endTime(self, value: Union[None, str]) -> None:
36385
+ self._inner_dict['endTime'] = value
36386
+
36387
+
36388
+ @property
36389
+ def timezone(self) -> Union[None, str]:
36390
+ """Time zone to interpret start and end times in.
36391
+ Uses standard time zone identifiers (e.g., "America/Los_Angeles", "UTC").
36392
+ If not specified, defaults to UTC."""
36393
+ return self._inner_dict.get('timezone') # type: ignore
36394
+
36395
+ @timezone.setter
36396
+ def timezone(self, value: Union[None, str]) -> None:
36397
+ self._inner_dict['timezone'] = value
36398
+
36399
+
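WeeklyWindow describes a recurring weekly window by day of week plus an optional HH:MM time range and timezone. A minimal sketch that covers weekend mornings, assuming the same import path as above.

from acryl_datahub_cloud.metadata.schema_classes import DayOfWeekClass, WeeklyWindowClass

weekend_mornings = WeeklyWindowClass(
    daysOfWeek=[DayOfWeekClass.SATURDAY, DayOfWeekClass.SUNDAY],
    startTime="00:00",   # HH:MM, 24-hour format
    endTime="11:59",
    timezone="UTC",
)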
35644
36400
  class DataHubUpgradeRequestClass(_Aspect):
35645
36401
  """Information collected when kicking off a DataHubUpgrade"""
35646
36402
 
@@ -36234,16 +36990,11 @@ __SCHEMA_TYPES = {
36234
36990
  'com.linkedin.pegasus2avro.ai.GlossaryTermsInferenceMetadata': GlossaryTermsInferenceMetadataClass,
36235
36991
  'com.linkedin.pegasus2avro.ai.InferenceGroupMetadata': InferenceGroupMetadataClass,
36236
36992
  'com.linkedin.pegasus2avro.ai.InferenceMetadata': InferenceMetadataClass,
36237
- 'com.linkedin.pegasus2avro.anomaly.AnomalyInfo': AnomalyInfoClass,
36238
- 'com.linkedin.pegasus2avro.anomaly.AnomalyReview': AnomalyReviewClass,
36239
36993
  'com.linkedin.pegasus2avro.anomaly.AnomalyReviewState': AnomalyReviewStateClass,
36240
36994
  'com.linkedin.pegasus2avro.anomaly.AnomalySource': AnomalySourceClass,
36241
36995
  'com.linkedin.pegasus2avro.anomaly.AnomalySourceProperties': AnomalySourcePropertiesClass,
36242
36996
  'com.linkedin.pegasus2avro.anomaly.AnomalySourceType': AnomalySourceTypeClass,
36243
- 'com.linkedin.pegasus2avro.anomaly.AnomalyState': AnomalyStateClass,
36244
- 'com.linkedin.pegasus2avro.anomaly.AnomalyStatus': AnomalyStatusClass,
36245
- 'com.linkedin.pegasus2avro.anomaly.AnomalyStatusProperties': AnomalyStatusPropertiesClass,
36246
- 'com.linkedin.pegasus2avro.anomaly.AnomalyType': AnomalyTypeClass,
36997
+ 'com.linkedin.pegasus2avro.anomaly.MonitorAnomalyEvent': MonitorAnomalyEventClass,
36247
36998
  'com.linkedin.pegasus2avro.assertion.AdjustmentAlgorithm': AdjustmentAlgorithmClass,
36248
36999
  'com.linkedin.pegasus2avro.assertion.AssertionAction': AssertionActionClass,
36249
37000
  'com.linkedin.pegasus2avro.assertion.AssertionActionType': AssertionActionTypeClass,
@@ -36252,8 +37003,11 @@ __SCHEMA_TYPES = {
36252
37003
  'com.linkedin.pegasus2avro.assertion.AssertionAnalyticsRunEvent': AssertionAnalyticsRunEventClass,
36253
37004
  'com.linkedin.pegasus2avro.assertion.AssertionDryRunEvent': AssertionDryRunEventClass,
36254
37005
  'com.linkedin.pegasus2avro.assertion.AssertionDryRunResult': AssertionDryRunResultClass,
37006
+ 'com.linkedin.pegasus2avro.assertion.AssertionExclusionWindow': AssertionExclusionWindowClass,
37007
+ 'com.linkedin.pegasus2avro.assertion.AssertionExclusionWindowType': AssertionExclusionWindowTypeClass,
36255
37008
  'com.linkedin.pegasus2avro.assertion.AssertionInferenceDetails': AssertionInferenceDetailsClass,
36256
37009
  'com.linkedin.pegasus2avro.assertion.AssertionInfo': AssertionInfoClass,
37010
+ 'com.linkedin.pegasus2avro.assertion.AssertionMonitorSensitivity': AssertionMonitorSensitivityClass,
36257
37011
  'com.linkedin.pegasus2avro.assertion.AssertionResult': AssertionResultClass,
36258
37012
  'com.linkedin.pegasus2avro.assertion.AssertionResultError': AssertionResultErrorClass,
36259
37013
  'com.linkedin.pegasus2avro.assertion.AssertionResultErrorType': AssertionResultErrorTypeClass,
@@ -36362,6 +37116,7 @@ __SCHEMA_TYPES = {
36362
37116
  'com.linkedin.pegasus2avro.common.InputFields': InputFieldsClass,
36363
37117
  'com.linkedin.pegasus2avro.common.InstitutionalMemory': InstitutionalMemoryClass,
36364
37118
  'com.linkedin.pegasus2avro.common.InstitutionalMemoryMetadata': InstitutionalMemoryMetadataClass,
37119
+ 'com.linkedin.pegasus2avro.common.InstitutionalMemoryMetadataSettings': InstitutionalMemoryMetadataSettingsClass,
36365
37120
  'com.linkedin.pegasus2avro.common.MLFeatureDataType': MLFeatureDataTypeClass,
36366
37121
  'com.linkedin.pegasus2avro.common.Media': MediaClass,
36367
37122
  'com.linkedin.pegasus2avro.common.MediaType': MediaTypeClass,
@@ -36627,6 +37382,7 @@ __SCHEMA_TYPES = {
36627
37382
  'com.linkedin.pegasus2avro.metadata.key.MLModelKey': MLModelKeyClass,
36628
37383
  'com.linkedin.pegasus2avro.metadata.key.MLPrimaryKeyKey': MLPrimaryKeyKeyClass,
36629
37384
  'com.linkedin.pegasus2avro.metadata.key.MonitorKey': MonitorKeyClass,
37385
+ 'com.linkedin.pegasus2avro.metadata.key.MonitorSuiteKey': MonitorSuiteKeyClass,
36630
37386
  'com.linkedin.pegasus2avro.metadata.key.NotebookKey': NotebookKeyClass,
36631
37387
  'com.linkedin.pegasus2avro.metadata.key.OwnershipTypeKey': OwnershipTypeKeyClass,
36632
37388
  'com.linkedin.pegasus2avro.metadata.key.PostKey': PostKeyClass,
@@ -36736,9 +37492,17 @@ __SCHEMA_TYPES = {
36736
37492
  'com.linkedin.pegasus2avro.monitor.DatasetVolumeAssertionParameters': DatasetVolumeAssertionParametersClass,
36737
37493
  'com.linkedin.pegasus2avro.monitor.DatasetVolumeSourceType': DatasetVolumeSourceTypeClass,
36738
37494
  'com.linkedin.pegasus2avro.monitor.EmbeddedAssertion': EmbeddedAssertionClass,
37495
+ 'com.linkedin.pegasus2avro.monitor.MonitorError': MonitorErrorClass,
37496
+ 'com.linkedin.pegasus2avro.monitor.MonitorErrorType': MonitorErrorTypeClass,
36739
37497
  'com.linkedin.pegasus2avro.monitor.MonitorInfo': MonitorInfoClass,
36740
37498
  'com.linkedin.pegasus2avro.monitor.MonitorMode': MonitorModeClass,
37499
+ 'com.linkedin.pegasus2avro.monitor.MonitorState': MonitorStateClass,
36741
37500
  'com.linkedin.pegasus2avro.monitor.MonitorStatus': MonitorStatusClass,
37501
+ 'com.linkedin.pegasus2avro.monitor.MonitorSuiteAssertionSettings': MonitorSuiteAssertionSettingsClass,
37502
+ 'com.linkedin.pegasus2avro.monitor.MonitorSuiteAssignmentSpec': MonitorSuiteAssignmentSpecClass,
37503
+ 'com.linkedin.pegasus2avro.monitor.MonitorSuiteInfo': MonitorSuiteInfoClass,
37504
+ 'com.linkedin.pegasus2avro.monitor.MonitorSuiteNotificationSettings': MonitorSuiteNotificationSettingsClass,
37505
+ 'com.linkedin.pegasus2avro.monitor.MonitorSuiteType': MonitorSuiteTypeClass,
36742
37506
  'com.linkedin.pegasus2avro.monitor.MonitorTimeseriesState': MonitorTimeseriesStateClass,
36743
37507
  'com.linkedin.pegasus2avro.monitor.MonitorType': MonitorTypeClass,
36744
37508
  'com.linkedin.pegasus2avro.mxe.GenericAspect': GenericAspectClass,
@@ -36876,11 +37640,15 @@ __SCHEMA_TYPES = {
36876
37640
  'com.linkedin.pegasus2avro.test.TestSource': TestSourceClass,
36877
37641
  'com.linkedin.pegasus2avro.test.TestSourceType': TestSourceTypeClass,
36878
37642
  'com.linkedin.pegasus2avro.test.TestStatus': TestStatusClass,
37643
+ 'com.linkedin.pegasus2avro.timeseries.AbsoluteTimeWindow': AbsoluteTimeWindowClass,
36879
37644
  'com.linkedin.pegasus2avro.timeseries.CalendarInterval': CalendarIntervalClass,
37645
+ 'com.linkedin.pegasus2avro.timeseries.DayOfWeek': DayOfWeekClass,
37646
+ 'com.linkedin.pegasus2avro.timeseries.HolidayWindow': HolidayWindowClass,
36880
37647
  'com.linkedin.pegasus2avro.timeseries.PartitionSpec': PartitionSpecClass,
36881
37648
  'com.linkedin.pegasus2avro.timeseries.PartitionType': PartitionTypeClass,
36882
37649
  'com.linkedin.pegasus2avro.timeseries.TimeWindow': TimeWindowClass,
36883
37650
  'com.linkedin.pegasus2avro.timeseries.TimeWindowSize': TimeWindowSizeClass,
37651
+ 'com.linkedin.pegasus2avro.timeseries.WeeklyWindow': WeeklyWindowClass,
36884
37652
  'com.linkedin.pegasus2avro.upgrade.DataHubUpgradeRequest': DataHubUpgradeRequestClass,
36885
37653
  'com.linkedin.pegasus2avro.upgrade.DataHubUpgradeResult': DataHubUpgradeResultClass,
36886
37654
  'com.linkedin.pegasus2avro.upgrade.DataHubUpgradeState': DataHubUpgradeStateClass,
@@ -36923,16 +37691,11 @@ __SCHEMA_TYPES = {
36923
37691
  'GlossaryTermsInferenceMetadata': GlossaryTermsInferenceMetadataClass,
36924
37692
  'InferenceGroupMetadata': InferenceGroupMetadataClass,
36925
37693
  'InferenceMetadata': InferenceMetadataClass,
36926
- 'AnomalyInfo': AnomalyInfoClass,
36927
- 'AnomalyReview': AnomalyReviewClass,
36928
37694
  'AnomalyReviewState': AnomalyReviewStateClass,
36929
37695
  'AnomalySource': AnomalySourceClass,
36930
37696
  'AnomalySourceProperties': AnomalySourcePropertiesClass,
36931
37697
  'AnomalySourceType': AnomalySourceTypeClass,
36932
- 'AnomalyState': AnomalyStateClass,
36933
- 'AnomalyStatus': AnomalyStatusClass,
36934
- 'AnomalyStatusProperties': AnomalyStatusPropertiesClass,
36935
- 'AnomalyType': AnomalyTypeClass,
37698
+ 'MonitorAnomalyEvent': MonitorAnomalyEventClass,
36936
37699
  'AdjustmentAlgorithm': AdjustmentAlgorithmClass,
36937
37700
  'AssertionAction': AssertionActionClass,
36938
37701
  'AssertionActionType': AssertionActionTypeClass,
@@ -36941,8 +37704,11 @@ __SCHEMA_TYPES = {
36941
37704
  'AssertionAnalyticsRunEvent': AssertionAnalyticsRunEventClass,
36942
37705
  'AssertionDryRunEvent': AssertionDryRunEventClass,
36943
37706
  'AssertionDryRunResult': AssertionDryRunResultClass,
37707
+ 'AssertionExclusionWindow': AssertionExclusionWindowClass,
37708
+ 'AssertionExclusionWindowType': AssertionExclusionWindowTypeClass,
36944
37709
  'AssertionInferenceDetails': AssertionInferenceDetailsClass,
36945
37710
  'AssertionInfo': AssertionInfoClass,
37711
+ 'AssertionMonitorSensitivity': AssertionMonitorSensitivityClass,
36946
37712
  'AssertionResult': AssertionResultClass,
36947
37713
  'AssertionResultError': AssertionResultErrorClass,
36948
37714
  'AssertionResultErrorType': AssertionResultErrorTypeClass,
@@ -37051,6 +37817,7 @@ __SCHEMA_TYPES = {
37051
37817
  'InputFields': InputFieldsClass,
37052
37818
  'InstitutionalMemory': InstitutionalMemoryClass,
37053
37819
  'InstitutionalMemoryMetadata': InstitutionalMemoryMetadataClass,
37820
+ 'InstitutionalMemoryMetadataSettings': InstitutionalMemoryMetadataSettingsClass,
37054
37821
  'MLFeatureDataType': MLFeatureDataTypeClass,
37055
37822
  'Media': MediaClass,
37056
37823
  'MediaType': MediaTypeClass,
@@ -37316,6 +38083,7 @@ __SCHEMA_TYPES = {
37316
38083
  'MLModelKey': MLModelKeyClass,
37317
38084
  'MLPrimaryKeyKey': MLPrimaryKeyKeyClass,
37318
38085
  'MonitorKey': MonitorKeyClass,
38086
+ 'MonitorSuiteKey': MonitorSuiteKeyClass,
37319
38087
  'NotebookKey': NotebookKeyClass,
37320
38088
  'OwnershipTypeKey': OwnershipTypeKeyClass,
37321
38089
  'PostKey': PostKeyClass,
@@ -37425,9 +38193,17 @@ __SCHEMA_TYPES = {
37425
38193
  'DatasetVolumeAssertionParameters': DatasetVolumeAssertionParametersClass,
37426
38194
  'DatasetVolumeSourceType': DatasetVolumeSourceTypeClass,
37427
38195
  'EmbeddedAssertion': EmbeddedAssertionClass,
38196
+ 'MonitorError': MonitorErrorClass,
38197
+ 'MonitorErrorType': MonitorErrorTypeClass,
37428
38198
  'MonitorInfo': MonitorInfoClass,
37429
38199
  'MonitorMode': MonitorModeClass,
38200
+ 'MonitorState': MonitorStateClass,
37430
38201
  'MonitorStatus': MonitorStatusClass,
38202
+ 'MonitorSuiteAssertionSettings': MonitorSuiteAssertionSettingsClass,
38203
+ 'MonitorSuiteAssignmentSpec': MonitorSuiteAssignmentSpecClass,
38204
+ 'MonitorSuiteInfo': MonitorSuiteInfoClass,
38205
+ 'MonitorSuiteNotificationSettings': MonitorSuiteNotificationSettingsClass,
38206
+ 'MonitorSuiteType': MonitorSuiteTypeClass,
37431
38207
  'MonitorTimeseriesState': MonitorTimeseriesStateClass,
37432
38208
  'MonitorType': MonitorTypeClass,
37433
38209
  'GenericAspect': GenericAspectClass,
@@ -37565,11 +38341,15 @@ __SCHEMA_TYPES = {
37565
38341
  'TestSource': TestSourceClass,
37566
38342
  'TestSourceType': TestSourceTypeClass,
37567
38343
  'TestStatus': TestStatusClass,
38344
+ 'AbsoluteTimeWindow': AbsoluteTimeWindowClass,
37568
38345
  'CalendarInterval': CalendarIntervalClass,
38346
+ 'DayOfWeek': DayOfWeekClass,
38347
+ 'HolidayWindow': HolidayWindowClass,
37569
38348
  'PartitionSpec': PartitionSpecClass,
37570
38349
  'PartitionType': PartitionTypeClass,
37571
38350
  'TimeWindow': TimeWindowClass,
37572
38351
  'TimeWindowSize': TimeWindowSizeClass,
38352
+ 'WeeklyWindow': WeeklyWindowClass,
37573
38353
  'DataHubUpgradeRequest': DataHubUpgradeRequestClass,
37574
38354
  'DataHubUpgradeResult': DataHubUpgradeResultClass,
37575
38355
  'DataHubUpgradeState': DataHubUpgradeStateClass,
@@ -37651,6 +38431,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
37651
38431
  EditableDataJobPropertiesClass,
37652
38432
  DatahubIngestionRunSummaryClass,
37653
38433
  DatahubIngestionCheckpointClass,
38434
+ MonitorSuiteInfoClass,
37654
38435
  MonitorInfoClass,
37655
38436
  MonitorTimeseriesStateClass,
37656
38437
  SchemaFieldAliasesClass,
@@ -37730,7 +38511,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
37730
38511
  DataProcessInstanceRunEventClass,
37731
38512
  DataProcessInfoClass,
37732
38513
  DataProcessInstanceOutputClass,
37733
- AnomalyInfoClass,
38514
+ MonitorAnomalyEventClass,
37734
38515
  SchemaFieldProfileClass,
37735
38516
  EditableDatasetPropertiesClass,
37736
38517
  DatasetProfileClass,
@@ -37801,6 +38582,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
37801
38582
  TagKeyClass,
37802
38583
  DataHubPolicyKeyClass,
37803
38584
  ConstraintKeyClass,
38585
+ MonitorSuiteKeyClass,
37804
38586
  GlossaryNodeKeyClass,
37805
38587
  DataProcessKeyClass,
37806
38588
  DataHubStepStateKeyClass,
@@ -37928,6 +38710,7 @@ class AspectBag(TypedDict, total=False):
37928
38710
  editableDataJobProperties: EditableDataJobPropertiesClass
37929
38711
  datahubIngestionRunSummary: DatahubIngestionRunSummaryClass
37930
38712
  datahubIngestionCheckpoint: DatahubIngestionCheckpointClass
38713
+ monitorSuiteInfo: MonitorSuiteInfoClass
37931
38714
  monitorInfo: MonitorInfoClass
37932
38715
  monitorTimeseriesState: MonitorTimeseriesStateClass
37933
38716
  schemaFieldAliases: SchemaFieldAliasesClass
@@ -38007,7 +38790,7 @@ class AspectBag(TypedDict, total=False):
38007
38790
  dataProcessInstanceRunEvent: DataProcessInstanceRunEventClass
38008
38791
  dataProcessInfo: DataProcessInfoClass
38009
38792
  dataProcessInstanceOutput: DataProcessInstanceOutputClass
38010
- anomalyInfo: AnomalyInfoClass
38793
+ monitorAnomalyEvent: MonitorAnomalyEventClass
38011
38794
  schemaFieldProfile: SchemaFieldProfileClass
38012
38795
  editableDatasetProperties: EditableDatasetPropertiesClass
38013
38796
  datasetProfile: DatasetProfileClass
@@ -38078,6 +38861,7 @@ class AspectBag(TypedDict, total=False):
38078
38861
  tagKey: TagKeyClass
38079
38862
  dataHubPolicyKey: DataHubPolicyKeyClass
38080
38863
  constraintKey: ConstraintKeyClass
38864
+ monitorSuiteKey: MonitorSuiteKeyClass
38081
38865
  glossaryNodeKey: GlossaryNodeKeyClass
38082
38866
  dataProcessKey: DataProcessKeyClass
38083
38867
  dataHubStepStateKey: DataHubStepStateKeyClass
@@ -38164,7 +38948,6 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
38164
38948
  'chart': ChartKeyClass,
38165
38949
  'mlModelGroup': MLModelGroupKeyClass,
38166
38950
  'subscription': SubscriptionKeyClass,
38167
- 'anomaly': AnomalyKeyClass,
38168
38951
  'dataHubRemoteExecutorPool': RemoteExecutorPoolKeyClass,
38169
38952
  'dataJob': DataJobKeyClass,
38170
38953
  'domain': DomainKeyClass,
@@ -38178,6 +38961,7 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
38178
38961
  'tag': TagKeyClass,
38179
38962
  'dataHubPolicy': DataHubPolicyKeyClass,
38180
38963
  'constraint': ConstraintKeyClass,
38964
+ 'monitorSuite': MonitorSuiteKeyClass,
38181
38965
  'glossaryNode': GlossaryNodeKeyClass,
38182
38966
  'dataProcess': DataProcessKeyClass,
38183
38967
  'dataHubStepState': DataHubStepStateKeyClass,
@@ -38235,7 +39019,6 @@ ENTITY_TYPE_NAMES: List[str] = [
38235
39019
  'chart',
38236
39020
  'mlModelGroup',
38237
39021
  'subscription',
38238
- 'anomaly',
38239
39022
  'dataHubRemoteExecutorPool',
38240
39023
  'dataJob',
38241
39024
  'domain',
@@ -38249,6 +39032,7 @@ ENTITY_TYPE_NAMES: List[str] = [
38249
39032
  'tag',
38250
39033
  'dataHubPolicy',
38251
39034
  'constraint',
39035
+ 'monitorSuite',
38252
39036
  'glossaryNode',
38253
39037
  'dataProcess',
38254
39038
  'dataHubStepState',
@@ -38305,7 +39089,6 @@ EntityTypeName = Literal[
38305
39089
  'chart',
38306
39090
  'mlModelGroup',
38307
39091
  'subscription',
38308
- 'anomaly',
38309
39092
  'dataHubRemoteExecutorPool',
38310
39093
  'dataJob',
38311
39094
  'domain',
@@ -38319,6 +39102,7 @@ EntityTypeName = Literal[
38319
39102
  'tag',
38320
39103
  'dataHubPolicy',
38321
39104
  'constraint',
39105
+ 'monitorSuite',
38322
39106
  'glossaryNode',
38323
39107
  'dataProcess',
38324
39108
  'dataHubStepState',