acryl-datahub 0.15.0.5rc5__py3-none-any.whl → 0.15.0.5rc7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of acryl-datahub might be problematic. Click here for more details.

Files changed (25) hide show
  1. {acryl_datahub-0.15.0.5rc5.dist-info → acryl_datahub-0.15.0.5rc7.dist-info}/METADATA +2423 -2416
  2. {acryl_datahub-0.15.0.5rc5.dist-info → acryl_datahub-0.15.0.5rc7.dist-info}/RECORD +25 -21
  3. {acryl_datahub-0.15.0.5rc5.dist-info → acryl_datahub-0.15.0.5rc7.dist-info}/entry_points.txt +1 -0
  4. datahub/_version.py +1 -1
  5. datahub/api/entities/dataprocess/dataprocess_instance.py +104 -11
  6. datahub/cli/container_cli.py +1 -64
  7. datahub/emitter/composite_emitter.py +36 -0
  8. datahub/ingestion/source/apply/__init__.py +0 -0
  9. datahub/ingestion/source/apply/datahub_apply.py +223 -0
  10. datahub/ingestion/source/dbt/dbt_core.py +1 -1
  11. datahub/ingestion/source/sql/sql_config.py +0 -10
  12. datahub/ingestion/source_config/operation_config.py +9 -0
  13. datahub/metadata/_schema_classes.py +144 -4
  14. datahub/metadata/com/linkedin/pegasus2avro/common/__init__.py +6 -0
  15. datahub/metadata/schema.avsc +115 -5
  16. datahub/metadata/schemas/AssertionInfo.avsc +2 -2
  17. datahub/metadata/schemas/CorpUserSettings.avsc +9 -0
  18. datahub/metadata/schemas/Deprecation.avsc +12 -0
  19. datahub/metadata/schemas/DisplayProperties.avsc +62 -0
  20. datahub/metadata/schemas/MetadataChangeEvent.avsc +12 -0
  21. datahub/metadata/schemas/PostInfo.avsc +28 -2
  22. datahub/metadata/schemas/SchemaFieldKey.avsc +2 -1
  23. {acryl_datahub-0.15.0.5rc5.dist-info → acryl_datahub-0.15.0.5rc7.dist-info}/LICENSE +0 -0
  24. {acryl_datahub-0.15.0.5rc5.dist-info → acryl_datahub-0.15.0.5rc7.dist-info}/WHEEL +0 -0
  25. {acryl_datahub-0.15.0.5rc5.dist-info → acryl_datahub-0.15.0.5rc7.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,223 @@
1
+ import logging
2
+ from functools import partial
3
+ from typing import Any, Iterable, List, Optional, Union
4
+
5
+ import progressbar
6
+ from pydantic import Field
7
+
8
+ from datahub.configuration.common import ConfigModel
9
+ from datahub.emitter.mcp import MetadataChangeProposalWrapper
10
+ from datahub.ingestion.api.common import PipelineContext
11
+ from datahub.ingestion.api.decorators import (
12
+ SupportStatus,
13
+ config_class,
14
+ platform_name,
15
+ support_status,
16
+ )
17
+ from datahub.ingestion.api.source import MetadataWorkUnitProcessor, Source, SourceReport
18
+ from datahub.ingestion.api.source_helpers import auto_workunit_reporter
19
+ from datahub.ingestion.api.workunit import MetadataWorkUnit
20
+ from datahub.ingestion.graph.client import DataHubGraph, get_default_graph
21
+ from datahub.metadata.schema_classes import (
22
+ DomainsClass,
23
+ GlossaryTermAssociationClass,
24
+ MetadataChangeProposalClass,
25
+ OwnerClass,
26
+ OwnershipTypeClass,
27
+ TagAssociationClass,
28
+ )
29
+ from datahub.specific.dataset import DatasetPatchBuilder
30
+
31
+ logger = logging.getLogger(__name__)
32
+
33
+
34
def apply_association_to_container(
    container_urn: str,
    association_urn: str,
    association_type: str,
    emit: bool = True,
    graph: Optional[DataHubGraph] = None,
) -> Optional[List[Union[MetadataChangeProposalWrapper, MetadataChangeProposalClass]]]:
    """
    Common function to add either tags, terms, domains, or owners to child
    datasets (for now) of a container.

    Args:
        container_urn: The URN of the container
        association_urn: The URN of the tag, term, or user to apply
        association_type: One of 'tag', 'term', 'domain' or 'owner'
        emit: When True, emit the generated proposals through the graph and
            return None. When False, return the proposals without emitting.
        graph: Graph client to use; defaults to ``get_default_graph()``.

    Returns:
        None when ``emit`` is True, otherwise the list of generated proposals.

    Raises:
        ValueError: If ``association_type`` is not a supported value.
    """
    # Fail fast on a bad type: previously a typo'd association_type silently
    # produced zero patches, which looked like success to the caller.
    if association_type not in ("tag", "term", "owner", "domain"):
        raise ValueError(
            f"Unknown association_type {association_type!r}; "
            "expected one of 'tag', 'term', 'domain', 'owner'"
        )

    urns: List[str] = [container_urn]
    if not graph:
        graph = get_default_graph()
    logger.info(f"Using {graph}")
    # Collect the datasets/containers filtered under this container.
    # NOTE(review): assumes the `container` filter covers nested children —
    # confirm against get_urns_by_filter semantics.
    urns.extend(
        graph.get_urns_by_filter(
            container=container_urn,
            batch_size=1000,
            entity_types=["dataset", "container"],
        )
    )

    all_patches: List[Union[MetadataChangeProposalWrapper, MetadataChangeProposalClass]] = []
    for urn in urns:
        builder = DatasetPatchBuilder(urn)
        patches: List[Any] = []
        if association_type == "tag":
            patches = builder.add_tag(TagAssociationClass(association_urn)).build()
        elif association_type == "term":
            patches = builder.add_term(
                GlossaryTermAssociationClass(association_urn)
            ).build()
        elif association_type == "owner":
            patches = builder.add_owner(
                OwnerClass(
                    owner=association_urn,
                    type=OwnershipTypeClass.TECHNICAL_OWNER,
                )
            ).build()
        else:  # "domain" — validated above
            # Domains are written as a full-aspect MCP rather than a patch.
            patches = [
                MetadataChangeProposalWrapper(
                    entityUrn=urn,
                    aspect=DomainsClass(domains=[association_urn]),
                )
            ]
        all_patches.extend(patches)
    if emit:
        # progressbar gives operator feedback during long emission runs.
        mcps_iter = progressbar.progressbar(all_patches, redirect_stdout=True)
        for mcp in mcps_iter:
            graph.emit(mcp)
        return None
    else:
        return all_patches
93
+
94
+
95
class DomainApplyConfig(ConfigModel):
    """Config for applying a single domain to a set of assets and their children."""

    assets: List[str] = Field(
        default_factory=list,
        # Typo fix: "hierarchichaly" -> "hierarchically" (user-facing docs text).
        description="List of assets to apply domain hierarchically. Currently only containers and datasets are supported",
    )
    # URN of the domain to apply; empty string means "not configured".
    domain_urn: str = Field(default="")
101
+
102
+
103
class TagApplyConfig(ConfigModel):
    """Config for applying a single tag to a set of assets and their children."""

    assets: List[str] = Field(
        default_factory=list,
        # Typo fix: "hierarchichaly" -> "hierarchically" (user-facing docs text).
        description="List of assets to apply tag hierarchically. Currently only containers and datasets are supported",
    )
    # URN of the tag to apply; empty string means "not configured".
    tag_urn: str = Field(default="")
109
+
110
+
111
class TermApplyConfig(ConfigModel):
    """Config for applying a single glossary term to a set of assets and their children."""

    assets: List[str] = Field(
        default_factory=list,
        # Typo fix: "hierarchichaly" -> "hierarchically" (user-facing docs text).
        description="List of assets to apply term hierarchically. Currently only containers and datasets are supported",
    )
    # URN of the glossary term to apply; empty string means "not configured".
    term_urn: str = Field(default="")
117
+
118
+
119
class OwnerApplyConfig(ConfigModel):
    """Config for applying a single owner to a set of assets and their children."""

    assets: List[str] = Field(
        default_factory=list,
        # Typo fix: "hierarchichaly" -> "hierarchically" (user-facing docs text).
        description="List of assets to apply owner hierarchically. Currently only containers and datasets are supported",
    )
    # URN of the owner (user/group) to apply; empty string means "not configured".
    owner_urn: str = Field(default="")
125
+
126
+
127
class DataHubApplyConfig(ConfigModel):
    """Top-level config for the apply source.

    Each optional list drives one category of metadata application; a None
    (or absent) entry simply skips that category.
    """

    domain_apply: Optional[List[DomainApplyConfig]] = Field(
        default=None, description="List to apply domains to assets"
    )
    tag_apply: Optional[List[TagApplyConfig]] = Field(
        default=None, description="List to apply tags to assets"
    )
    term_apply: Optional[List[TermApplyConfig]] = Field(
        default=None, description="List to apply terms to assets"
    )
    owner_apply: Optional[List[OwnerApplyConfig]] = Field(
        default=None, description="List to apply owners to assets"
    )
144
+
145
+
146
@platform_name("DataHubApply")
@config_class(DataHubApplyConfig)
@support_status(SupportStatus.TESTING)
class DataHubApplySource(Source):
    """
    This source is a helper over CLI
    so people can use the helper to apply various metadata changes to DataHub
    via Managed Ingestion
    """

    def __init__(self, ctx: PipelineContext, config: DataHubApplyConfig):
        self.ctx = ctx
        self.config = config
        self.report = SourceReport()
        # A graph client is mandatory for this source — it both reads the
        # container hierarchy and (via the pipeline) receives the proposals.
        self.graph = ctx.require_graph()

    def _yield_workunits(
        self,
        proposals: List[
            Union[MetadataChangeProposalWrapper, MetadataChangeProposalClass]
        ],
    ) -> Iterable[MetadataWorkUnit]:
        """Convert raw change proposals into emittable work units."""
        for item in proposals:
            if isinstance(item, MetadataChangeProposalWrapper):
                # The wrapper already knows how to become a work unit.
                yield item.as_workunit()
                continue
            # Raw MCPs need an explicitly generated id.
            yield MetadataWorkUnit(
                id=MetadataWorkUnit.generate_workunit_id(item),
                mcp_raw=item,
            )

    def _handle_assets(
        self, assets: List[str], apply_urn: str, apply_type: str
    ) -> Iterable[MetadataWorkUnit]:
        """Generate (without emitting) the proposals for each configured asset."""
        for asset_urn in assets:
            proposals = apply_association_to_container(
                asset_urn, apply_urn, apply_type, emit=False, graph=self.graph
            )
            # emit=False guarantees a list is returned.
            assert proposals is not None
            yield from self._yield_workunits(proposals)

    def _yield_domain(self) -> Iterable[MetadataWorkUnit]:
        # `or []` covers both "not configured" (None) and an empty list.
        for entry in self.config.domain_apply or []:
            yield from self._handle_assets(entry.assets, entry.domain_urn, "domain")

    def _yield_tag(self) -> Iterable[MetadataWorkUnit]:
        for entry in self.config.tag_apply or []:
            yield from self._handle_assets(entry.assets, entry.tag_urn, "tag")

    def _yield_term(self) -> Iterable[MetadataWorkUnit]:
        for entry in self.config.term_apply or []:
            yield from self._handle_assets(entry.assets, entry.term_urn, "term")

    def _yield_owner(self) -> Iterable[MetadataWorkUnit]:
        for entry in self.config.owner_apply or []:
            yield from self._handle_assets(entry.assets, entry.owner_urn, "owner")

    def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]:
        """Yield work units for every configured apply category, in order."""
        yield from self._yield_domain()
        yield from self._yield_tag()
        yield from self._yield_term()
        yield from self._yield_owner()

    def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]:
        # Only auto-reporting; no state handling or browse-path processors.
        return [partial(auto_workunit_reporter, self.get_report())]

    def get_report(self) -> SourceReport:
        return self.report
@@ -488,7 +488,7 @@ class DBTCoreSource(DBTSourceBase, TestableSource):
488
488
  ) -> Dict:
489
489
  if re.match("^https?://", uri):
490
490
  return json.loads(requests.get(uri).text)
491
- elif re.match("^s3://", uri):
491
+ elif is_s3_uri(uri):
492
492
  u = urlparse(uri)
493
493
  assert aws_connection
494
494
  response = aws_connection.get_s3_client().get_object(
@@ -2,8 +2,6 @@ import logging
2
2
  from abc import abstractmethod
3
3
  from typing import Any, Dict, Optional
4
4
 
5
- import cachetools
6
- import cachetools.keys
7
5
  import pydantic
8
6
  from pydantic import Field
9
7
  from sqlalchemy.engine import URL
@@ -29,7 +27,6 @@ from datahub.ingestion.source.state.stateful_ingestion_base import (
29
27
  StatefulIngestionConfigBase,
30
28
  )
31
29
  from datahub.ingestion.source_config.operation_config import is_profiling_enabled
32
- from datahub.utilities.cachetools_keys import self_methodkey
33
30
 
34
31
  logger: logging.Logger = logging.getLogger(__name__)
35
32
 
@@ -118,13 +115,6 @@ class SQLCommonConfig(
118
115
  # Custom Stateful Ingestion settings
119
116
  stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = None
120
117
 
121
- # TRICKY: The operation_config is time-dependent. Because we don't want to change
122
- # whether or not we're running profiling mid-ingestion, we cache the result of this method.
123
- # TODO: This decorator should be moved to the is_profiling_enabled(operation_config) method.
124
- @cachetools.cached(
125
- cache=cachetools.LRUCache(maxsize=1),
126
- key=self_methodkey,
127
- )
128
118
  def is_profiling_enabled(self) -> bool:
129
119
  return self.profiling.enabled and is_profiling_enabled(
130
120
  self.profiling.operation_config
@@ -2,10 +2,12 @@ import datetime
2
2
  import logging
3
3
  from typing import Any, Dict, Optional
4
4
 
5
+ import cachetools
5
6
  import pydantic
6
7
  from pydantic.fields import Field
7
8
 
8
9
  from datahub.configuration.common import ConfigModel
10
+ from datahub.utilities.cachetools_keys import self_methodkey
9
11
 
10
12
  logger = logging.getLogger(__name__)
11
13
 
@@ -62,6 +64,13 @@ class OperationConfig(ConfigModel):
62
64
  return profile_date_of_month
63
65
 
64
66
 
67
+ # TRICKY: The operation_config is time-dependent. Because we don't want to change
68
+ # whether or not we're running profiling mid-ingestion, we cache the result of this method.
69
+ # An additional benefit is that we only print the log lines on the first call.
70
+ @cachetools.cached(
71
+ cache=cachetools.LRUCache(maxsize=1),
72
+ key=self_methodkey,
73
+ )
65
74
  def is_profiling_enabled(operation_config: OperationConfig) -> bool:
66
75
  if operation_config.lower_freq_profile_enabled is False:
67
76
  return True
@@ -971,13 +971,15 @@ class AssertionSourceTypeClass(object):
971
971
  # No docs available.
972
972
 
973
973
  NATIVE = "NATIVE"
974
- """The assertion was defined natively on DataHub by a user."""
974
+ """The assertion was defined natively on DataHub by a user.
975
+ DataHub Cloud only"""
975
976
 
976
977
  EXTERNAL = "EXTERNAL"
977
978
  """The assertion was defined and managed externally of DataHub."""
978
979
 
979
980
  INFERRED = "INFERRED"
980
- """The assertion was inferred, e.g. from offline AI / ML models."""
981
+ """The assertion was inferred, e.g. from offline AI / ML models.
982
+ DataHub Cloud only"""
981
983
 
982
984
 
983
985
 
@@ -4120,6 +4122,7 @@ class DeprecationClass(_Aspect):
4120
4122
  note: str,
4121
4123
  actor: str,
4122
4124
  decommissionTime: Union[None, int]=None,
4125
+ replacement: Union[None, str]=None,
4123
4126
  ):
4124
4127
  super().__init__()
4125
4128
 
@@ -4127,12 +4130,14 @@ class DeprecationClass(_Aspect):
4127
4130
  self.decommissionTime = decommissionTime
4128
4131
  self.note = note
4129
4132
  self.actor = actor
4133
+ self.replacement = replacement
4130
4134
 
4131
4135
  def _restore_defaults(self) -> None:
4132
4136
  self.deprecated = bool()
4133
4137
  self.decommissionTime = self.RECORD_SCHEMA.fields_dict["decommissionTime"].default
4134
4138
  self.note = str()
4135
4139
  self.actor = str()
4140
+ self.replacement = self.RECORD_SCHEMA.fields_dict["replacement"].default
4136
4141
 
4137
4142
 
4138
4143
  @property
@@ -4175,6 +4180,58 @@ class DeprecationClass(_Aspect):
4175
4180
  self._inner_dict['actor'] = value
4176
4181
 
4177
4182
 
4183
+ @property
4184
+ def replacement(self) -> Union[None, str]:
4185
+ # No docs available.
4186
+ return self._inner_dict.get('replacement') # type: ignore
4187
+
4188
+ @replacement.setter
4189
+ def replacement(self, value: Union[None, str]) -> None:
4190
+ self._inner_dict['replacement'] = value
4191
+
4192
+
4193
+ class DisplayPropertiesClass(_Aspect):
4194
+ """Properties related to how the entity is displayed in the Datahub UI"""
4195
+
4196
+
4197
+ ASPECT_NAME = 'displayProperties'
4198
+ ASPECT_INFO = {}
4199
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.common.DisplayProperties")
4200
+
4201
+ def __init__(self,
4202
+ colorHex: Union[None, str]=None,
4203
+ icon: Union[None, "IconPropertiesClass"]=None,
4204
+ ):
4205
+ super().__init__()
4206
+
4207
+ self.colorHex = colorHex
4208
+ self.icon = icon
4209
+
4210
+ def _restore_defaults(self) -> None:
4211
+ self.colorHex = self.RECORD_SCHEMA.fields_dict["colorHex"].default
4212
+ self.icon = self.RECORD_SCHEMA.fields_dict["icon"].default
4213
+
4214
+
4215
+ @property
4216
+ def colorHex(self) -> Union[None, str]:
4217
+ """The color associated with the entity in Hex. For example #FFFFFF."""
4218
+ return self._inner_dict.get('colorHex') # type: ignore
4219
+
4220
+ @colorHex.setter
4221
+ def colorHex(self, value: Union[None, str]) -> None:
4222
+ self._inner_dict['colorHex'] = value
4223
+
4224
+
4225
+ @property
4226
+ def icon(self) -> Union[None, "IconPropertiesClass"]:
4227
+ """The icon associated with the entity"""
4228
+ return self._inner_dict.get('icon') # type: ignore
4229
+
4230
+ @icon.setter
4231
+ def icon(self, value: Union[None, "IconPropertiesClass"]) -> None:
4232
+ self._inner_dict['icon'] = value
4233
+
4234
+
4178
4235
  class DocumentationClass(_Aspect):
4179
4236
  """Aspect used for storing all applicable documentations on assets.
4180
4237
  This aspect supports multiple documentations from different sources.
@@ -4830,6 +4887,65 @@ class GlossaryTermsClass(_Aspect):
4830
4887
  self._inner_dict['auditStamp'] = value
4831
4888
 
4832
4889
 
4890
# Plain constant-holder class (not enum.Enum): values are raw strings, so
# they compare and serialize as plain str.
class IconLibraryClass(object):
    """Enum of possible icon sources"""

    MATERIAL = "MATERIAL"
    """Material UI"""
+
4898
# NOTE(review): generated wrapper over the avro record; fields live in
# self._inner_dict (provided by DictWrapper) and the properties proxy it.
class IconPropertiesClass(DictWrapper):
    """Properties describing an icon associated with an entity"""

    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.common.IconProperties")
    def __init__(self,
        iconLibrary: Union[str, "IconLibraryClass"],
        name: str,
        style: str,
    ):
        super().__init__()

        # All three fields are required (no defaults in the constructor).
        self.iconLibrary = iconLibrary
        self.name = name
        self.style = style

    def _restore_defaults(self) -> None:
        # Hard-coded fallbacks rather than schema lookups for this record.
        self.iconLibrary = IconLibraryClass.MATERIAL
        self.name = str()
        self.style = str()


    @property
    def iconLibrary(self) -> Union[str, "IconLibraryClass"]:
        """The source of the icon: e.g. Antd, Material, etc"""
        return self._inner_dict.get('iconLibrary')  # type: ignore

    @iconLibrary.setter
    def iconLibrary(self, value: Union[str, "IconLibraryClass"]) -> None:
        self._inner_dict['iconLibrary'] = value


    @property
    def name(self) -> str:
        """The name of the icon"""
        return self._inner_dict.get('name')  # type: ignore

    @name.setter
    def name(self, value: str) -> None:
        self._inner_dict['name'] = value


    @property
    def style(self) -> str:
        """Any modifier for the icon, this will be library-specific, e.g. filled/outlined, etc"""
        return self._inner_dict.get('style')  # type: ignore

    @style.setter
    def style(self, value: str) -> None:
        self._inner_dict['style'] = value
+
4833
4949
  class IncidentSummaryDetailsClass(DictWrapper):
4834
4950
  """Summary statistics about incidents on an entity."""
4835
4951
 
@@ -12965,13 +13081,16 @@ class CorpUserAppearanceSettingsClass(DictWrapper):
12965
13081
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.identity.CorpUserAppearanceSettings")
12966
13082
  def __init__(self,
12967
13083
  showSimplifiedHomepage: Union[None, bool]=None,
13084
+ showThemeV2: Union[None, bool]=None,
12968
13085
  ):
12969
13086
  super().__init__()
12970
13087
 
12971
13088
  self.showSimplifiedHomepage = showSimplifiedHomepage
13089
+ self.showThemeV2 = showThemeV2
12972
13090
 
12973
13091
  def _restore_defaults(self) -> None:
12974
13092
  self.showSimplifiedHomepage = self.RECORD_SCHEMA.fields_dict["showSimplifiedHomepage"].default
13093
+ self.showThemeV2 = self.RECORD_SCHEMA.fields_dict["showThemeV2"].default
12975
13094
 
12976
13095
 
12977
13096
  @property
@@ -12985,6 +13104,16 @@ class CorpUserAppearanceSettingsClass(DictWrapper):
12985
13104
  self._inner_dict['showSimplifiedHomepage'] = value
12986
13105
 
12987
13106
 
13107
+ @property
13108
+ def showThemeV2(self) -> Union[None, bool]:
13109
+ """Flag controlling whether the V2 UI for DataHub is shown."""
13110
+ return self._inner_dict.get('showThemeV2') # type: ignore
13111
+
13112
+ @showThemeV2.setter
13113
+ def showThemeV2(self, value: Union[None, bool]) -> None:
13114
+ self._inner_dict['showThemeV2'] = value
13115
+
13116
+
12988
13117
  class CorpUserCredentialsClass(_Aspect):
12989
13118
  """Corp user credentials"""
12990
13119
 
@@ -15780,7 +15909,7 @@ class SchemaFieldKeyClass(_Aspect):
15780
15909
 
15781
15910
 
15782
15911
  ASPECT_NAME = 'schemaFieldKey'
15783
- ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults']}
15912
+ ASPECT_INFO = {'keyForEntity': 'schemaField', 'entityCategory': 'core', 'entityAspects': ['schemafieldInfo', 'structuredProperties', 'forms', 'businessAttributes', 'status', 'schemaFieldAliases', 'documentation', 'testResults', 'deprecation']}
15784
15913
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.SchemaFieldKey")
15785
15914
 
15786
15915
  def __init__(self,
@@ -20824,7 +20953,7 @@ class PostInfoClass(_Aspect):
20824
20953
 
20825
20954
  @property
20826
20955
  def target(self) -> Union[None, str]:
20827
- """Optional URN that the post is associated with."""
20956
+ """Optional Entity URN that the post is associated with."""
20828
20957
  return self._inner_dict.get('target') # type: ignore
20829
20958
 
20830
20959
  @target.setter
@@ -20838,6 +20967,9 @@ class PostTypeClass(object):
20838
20967
  HOME_PAGE_ANNOUNCEMENT = "HOME_PAGE_ANNOUNCEMENT"
20839
20968
  """The Post is an Home Page announcement."""
20840
20969
 
20970
+ ENTITY_ANNOUNCEMENT = "ENTITY_ANNOUNCEMENT"
20971
+ """The Post is an Entity level announcement."""
20972
+
20841
20973
 
20842
20974
 
20843
20975
  class QueryLanguageClass(object):
@@ -25030,6 +25162,7 @@ __SCHEMA_TYPES = {
25030
25162
  'com.linkedin.pegasus2avro.common.DataTransform': DataTransformClass,
25031
25163
  'com.linkedin.pegasus2avro.common.DataTransformLogic': DataTransformLogicClass,
25032
25164
  'com.linkedin.pegasus2avro.common.Deprecation': DeprecationClass,
25165
+ 'com.linkedin.pegasus2avro.common.DisplayProperties': DisplayPropertiesClass,
25033
25166
  'com.linkedin.pegasus2avro.common.Documentation': DocumentationClass,
25034
25167
  'com.linkedin.pegasus2avro.common.DocumentationAssociation': DocumentationAssociationClass,
25035
25168
  'com.linkedin.pegasus2avro.common.Edge': EdgeClass,
@@ -25044,6 +25177,8 @@ __SCHEMA_TYPES = {
25044
25177
  'com.linkedin.pegasus2avro.common.GlobalTags': GlobalTagsClass,
25045
25178
  'com.linkedin.pegasus2avro.common.GlossaryTermAssociation': GlossaryTermAssociationClass,
25046
25179
  'com.linkedin.pegasus2avro.common.GlossaryTerms': GlossaryTermsClass,
25180
+ 'com.linkedin.pegasus2avro.common.IconLibrary': IconLibraryClass,
25181
+ 'com.linkedin.pegasus2avro.common.IconProperties': IconPropertiesClass,
25047
25182
  'com.linkedin.pegasus2avro.common.IncidentSummaryDetails': IncidentSummaryDetailsClass,
25048
25183
  'com.linkedin.pegasus2avro.common.IncidentsSummary': IncidentsSummaryClass,
25049
25184
  'com.linkedin.pegasus2avro.common.InputField': InputFieldClass,
@@ -25503,6 +25638,7 @@ __SCHEMA_TYPES = {
25503
25638
  'DataTransform': DataTransformClass,
25504
25639
  'DataTransformLogic': DataTransformLogicClass,
25505
25640
  'Deprecation': DeprecationClass,
25641
+ 'DisplayProperties': DisplayPropertiesClass,
25506
25642
  'Documentation': DocumentationClass,
25507
25643
  'DocumentationAssociation': DocumentationAssociationClass,
25508
25644
  'Edge': EdgeClass,
@@ -25517,6 +25653,8 @@ __SCHEMA_TYPES = {
25517
25653
  'GlobalTags': GlobalTagsClass,
25518
25654
  'GlossaryTermAssociation': GlossaryTermAssociationClass,
25519
25655
  'GlossaryTerms': GlossaryTermsClass,
25656
+ 'IconLibrary': IconLibraryClass,
25657
+ 'IconProperties': IconPropertiesClass,
25520
25658
  'IncidentSummaryDetails': IncidentSummaryDetailsClass,
25521
25659
  'IncidentsSummary': IncidentsSummaryClass,
25522
25660
  'InputField': InputFieldClass,
@@ -25926,6 +26064,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
25926
26064
  DocumentationClass,
25927
26065
  DataPlatformInstanceClass,
25928
26066
  InputFieldsClass,
26067
+ DisplayPropertiesClass,
25929
26068
  OwnershipClass,
25930
26069
  OperationClass,
25931
26070
  FormsClass,
@@ -26144,6 +26283,7 @@ class AspectBag(TypedDict, total=False):
26144
26283
  documentation: DocumentationClass
26145
26284
  dataPlatformInstance: DataPlatformInstanceClass
26146
26285
  inputFields: InputFieldsClass
26286
+ displayProperties: DisplayPropertiesClass
26147
26287
  ownership: OwnershipClass
26148
26288
  operation: OperationClass
26149
26289
  forms: FormsClass
@@ -22,6 +22,7 @@ from .....schema_classes import DataPlatformInstanceClass
22
22
  from .....schema_classes import DataTransformClass
23
23
  from .....schema_classes import DataTransformLogicClass
24
24
  from .....schema_classes import DeprecationClass
25
+ from .....schema_classes import DisplayPropertiesClass
25
26
  from .....schema_classes import DocumentationClass
26
27
  from .....schema_classes import DocumentationAssociationClass
27
28
  from .....schema_classes import EdgeClass
@@ -36,6 +37,8 @@ from .....schema_classes import FormsClass
36
37
  from .....schema_classes import GlobalTagsClass
37
38
  from .....schema_classes import GlossaryTermAssociationClass
38
39
  from .....schema_classes import GlossaryTermsClass
40
+ from .....schema_classes import IconLibraryClass
41
+ from .....schema_classes import IconPropertiesClass
39
42
  from .....schema_classes import IncidentSummaryDetailsClass
40
43
  from .....schema_classes import IncidentsSummaryClass
41
44
  from .....schema_classes import InputFieldClass
@@ -85,6 +88,7 @@ DataPlatformInstance = DataPlatformInstanceClass
85
88
  DataTransform = DataTransformClass
86
89
  DataTransformLogic = DataTransformLogicClass
87
90
  Deprecation = DeprecationClass
91
+ DisplayProperties = DisplayPropertiesClass
88
92
  Documentation = DocumentationClass
89
93
  DocumentationAssociation = DocumentationAssociationClass
90
94
  Edge = EdgeClass
@@ -99,6 +103,8 @@ Forms = FormsClass
99
103
  GlobalTags = GlobalTagsClass
100
104
  GlossaryTermAssociation = GlossaryTermAssociationClass
101
105
  GlossaryTerms = GlossaryTermsClass
106
+ IconLibrary = IconLibraryClass
107
+ IconProperties = IconPropertiesClass
102
108
  IncidentSummaryDetails = IncidentSummaryDetailsClass
103
109
  IncidentsSummary = IncidentsSummaryClass
104
110
  InputField = InputFieldClass