acryl-datahub 1.3.0.1rc9__py3-none-any.whl → 1.3.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of acryl-datahub might be problematic; consult the package registry's advisory page for more details.

Files changed (263)
  1. {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/METADATA +2550 -2543
  2. {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/RECORD +263 -261
  3. datahub/_version.py +1 -1
  4. datahub/api/entities/common/serialized_value.py +2 -2
  5. datahub/api/entities/corpgroup/corpgroup.py +11 -6
  6. datahub/api/entities/corpuser/corpuser.py +11 -11
  7. datahub/api/entities/dataproduct/dataproduct.py +47 -27
  8. datahub/api/entities/dataset/dataset.py +32 -21
  9. datahub/api/entities/external/lake_formation_external_entites.py +5 -6
  10. datahub/api/entities/external/unity_catalog_external_entites.py +5 -7
  11. datahub/api/entities/forms/forms.py +16 -14
  12. datahub/api/entities/structuredproperties/structuredproperties.py +23 -16
  13. datahub/cli/check_cli.py +2 -2
  14. datahub/cli/config_utils.py +3 -3
  15. datahub/cli/lite_cli.py +9 -7
  16. datahub/cli/migrate.py +4 -4
  17. datahub/cli/quickstart_versioning.py +3 -3
  18. datahub/cli/specific/group_cli.py +1 -1
  19. datahub/cli/specific/structuredproperties_cli.py +1 -1
  20. datahub/cli/specific/user_cli.py +1 -1
  21. datahub/configuration/common.py +14 -2
  22. datahub/configuration/connection_resolver.py +2 -2
  23. datahub/configuration/git.py +47 -30
  24. datahub/configuration/import_resolver.py +2 -2
  25. datahub/configuration/kafka.py +4 -3
  26. datahub/configuration/time_window_config.py +26 -26
  27. datahub/configuration/validate_field_deprecation.py +2 -2
  28. datahub/configuration/validate_field_removal.py +2 -2
  29. datahub/configuration/validate_field_rename.py +2 -2
  30. datahub/configuration/validate_multiline_string.py +2 -1
  31. datahub/emitter/kafka_emitter.py +3 -1
  32. datahub/emitter/rest_emitter.py +2 -4
  33. datahub/ingestion/api/decorators.py +1 -1
  34. datahub/ingestion/api/report.py +1 -1
  35. datahub/ingestion/api/sink.py +1 -1
  36. datahub/ingestion/api/source.py +1 -1
  37. datahub/ingestion/glossary/datahub_classifier.py +11 -8
  38. datahub/ingestion/graph/client.py +5 -1
  39. datahub/ingestion/reporting/datahub_ingestion_run_summary_provider.py +1 -1
  40. datahub/ingestion/reporting/file_reporter.py +5 -4
  41. datahub/ingestion/run/pipeline.py +7 -6
  42. datahub/ingestion/run/pipeline_config.py +12 -14
  43. datahub/ingestion/run/sink_callback.py +1 -1
  44. datahub/ingestion/sink/datahub_rest.py +6 -4
  45. datahub/ingestion/source/abs/config.py +19 -19
  46. datahub/ingestion/source/abs/datalake_profiler_config.py +11 -13
  47. datahub/ingestion/source/abs/source.py +2 -2
  48. datahub/ingestion/source/aws/aws_common.py +1 -1
  49. datahub/ingestion/source/aws/glue.py +6 -4
  50. datahub/ingestion/source/aws/sagemaker.py +1 -1
  51. datahub/ingestion/source/azure/azure_common.py +8 -12
  52. datahub/ingestion/source/bigquery_v2/bigquery.py +1 -1
  53. datahub/ingestion/source/bigquery_v2/bigquery_config.py +43 -30
  54. datahub/ingestion/source/bigquery_v2/bigquery_queries.py +1 -1
  55. datahub/ingestion/source/cassandra/cassandra.py +1 -1
  56. datahub/ingestion/source/common/gcp_credentials_config.py +10 -10
  57. datahub/ingestion/source/data_lake_common/path_spec.py +85 -89
  58. datahub/ingestion/source/datahub/config.py +8 -8
  59. datahub/ingestion/source/datahub/datahub_source.py +1 -1
  60. datahub/ingestion/source/dbt/dbt_cloud.py +9 -3
  61. datahub/ingestion/source/dbt/dbt_common.py +39 -37
  62. datahub/ingestion/source/dbt/dbt_core.py +10 -12
  63. datahub/ingestion/source/debug/datahub_debug.py +1 -1
  64. datahub/ingestion/source/delta_lake/config.py +6 -4
  65. datahub/ingestion/source/dremio/dremio_api.py +212 -78
  66. datahub/ingestion/source/dremio/dremio_config.py +10 -6
  67. datahub/ingestion/source/dremio/dremio_entities.py +55 -39
  68. datahub/ingestion/source/dremio/dremio_profiling.py +14 -3
  69. datahub/ingestion/source/dremio/dremio_source.py +24 -26
  70. datahub/ingestion/source/dynamodb/dynamodb.py +1 -1
  71. datahub/ingestion/source/elastic_search.py +110 -32
  72. datahub/ingestion/source/excel/source.py +1 -1
  73. datahub/ingestion/source/feast.py +1 -1
  74. datahub/ingestion/source/file.py +5 -4
  75. datahub/ingestion/source/fivetran/config.py +17 -16
  76. datahub/ingestion/source/fivetran/fivetran.py +2 -2
  77. datahub/ingestion/source/gc/datahub_gc.py +1 -1
  78. datahub/ingestion/source/gcs/gcs_source.py +8 -10
  79. datahub/ingestion/source/ge_profiling_config.py +8 -5
  80. datahub/ingestion/source/grafana/grafana_api.py +2 -2
  81. datahub/ingestion/source/grafana/grafana_config.py +4 -3
  82. datahub/ingestion/source/grafana/grafana_source.py +1 -1
  83. datahub/ingestion/source/grafana/models.py +23 -5
  84. datahub/ingestion/source/hex/api.py +7 -5
  85. datahub/ingestion/source/hex/hex.py +4 -3
  86. datahub/ingestion/source/iceberg/iceberg.py +1 -1
  87. datahub/ingestion/source/iceberg/iceberg_common.py +5 -3
  88. datahub/ingestion/source/identity/azure_ad.py +1 -1
  89. datahub/ingestion/source/identity/okta.py +10 -10
  90. datahub/ingestion/source/kafka/kafka.py +1 -1
  91. datahub/ingestion/source/ldap.py +1 -1
  92. datahub/ingestion/source/looker/looker_common.py +7 -5
  93. datahub/ingestion/source/looker/looker_config.py +21 -20
  94. datahub/ingestion/source/looker/lookml_config.py +47 -47
  95. datahub/ingestion/source/metabase.py +8 -8
  96. datahub/ingestion/source/metadata/business_glossary.py +2 -2
  97. datahub/ingestion/source/metadata/lineage.py +13 -8
  98. datahub/ingestion/source/mlflow.py +1 -1
  99. datahub/ingestion/source/mode.py +6 -4
  100. datahub/ingestion/source/mongodb.py +4 -3
  101. datahub/ingestion/source/neo4j/neo4j_source.py +1 -1
  102. datahub/ingestion/source/nifi.py +17 -23
  103. datahub/ingestion/source/openapi.py +6 -8
  104. datahub/ingestion/source/powerbi/config.py +33 -32
  105. datahub/ingestion/source/powerbi/dataplatform_instance_resolver.py +2 -2
  106. datahub/ingestion/source/powerbi/powerbi.py +1 -1
  107. datahub/ingestion/source/powerbi_report_server/report_server.py +2 -2
  108. datahub/ingestion/source/powerbi_report_server/report_server_domain.py +8 -6
  109. datahub/ingestion/source/preset.py +8 -8
  110. datahub/ingestion/source/pulsar.py +1 -1
  111. datahub/ingestion/source/qlik_sense/data_classes.py +15 -8
  112. datahub/ingestion/source/qlik_sense/qlik_api.py +7 -7
  113. datahub/ingestion/source/qlik_sense/qlik_sense.py +1 -1
  114. datahub/ingestion/source/redshift/config.py +18 -20
  115. datahub/ingestion/source/redshift/redshift.py +2 -2
  116. datahub/ingestion/source/redshift/usage.py +23 -3
  117. datahub/ingestion/source/s3/config.py +83 -62
  118. datahub/ingestion/source/s3/datalake_profiler_config.py +11 -13
  119. datahub/ingestion/source/s3/source.py +8 -5
  120. datahub/ingestion/source/sac/sac.py +5 -4
  121. datahub/ingestion/source/salesforce.py +3 -2
  122. datahub/ingestion/source/schema/json_schema.py +2 -2
  123. datahub/ingestion/source/sigma/data_classes.py +3 -2
  124. datahub/ingestion/source/sigma/sigma.py +1 -1
  125. datahub/ingestion/source/sigma/sigma_api.py +7 -7
  126. datahub/ingestion/source/slack/slack.py +1 -1
  127. datahub/ingestion/source/snaplogic/snaplogic.py +1 -1
  128. datahub/ingestion/source/snowflake/snowflake_assertion.py +1 -1
  129. datahub/ingestion/source/snowflake/snowflake_config.py +35 -31
  130. datahub/ingestion/source/snowflake/snowflake_connection.py +35 -13
  131. datahub/ingestion/source/snowflake/snowflake_lineage_v2.py +3 -3
  132. datahub/ingestion/source/snowflake/snowflake_queries.py +28 -4
  133. datahub/ingestion/source/sql/athena.py +1 -1
  134. datahub/ingestion/source/sql/clickhouse.py +4 -2
  135. datahub/ingestion/source/sql/cockroachdb.py +1 -1
  136. datahub/ingestion/source/sql/druid.py +1 -1
  137. datahub/ingestion/source/sql/hana.py +1 -1
  138. datahub/ingestion/source/sql/hive.py +7 -5
  139. datahub/ingestion/source/sql/hive_metastore.py +1 -1
  140. datahub/ingestion/source/sql/mssql/source.py +13 -6
  141. datahub/ingestion/source/sql/mysql.py +1 -1
  142. datahub/ingestion/source/sql/oracle.py +17 -10
  143. datahub/ingestion/source/sql/postgres.py +2 -2
  144. datahub/ingestion/source/sql/presto.py +1 -1
  145. datahub/ingestion/source/sql/sql_config.py +8 -9
  146. datahub/ingestion/source/sql/sql_generic.py +1 -1
  147. datahub/ingestion/source/sql/teradata.py +1 -1
  148. datahub/ingestion/source/sql/trino.py +1 -1
  149. datahub/ingestion/source/sql/vertica.py +5 -4
  150. datahub/ingestion/source/sql_queries.py +174 -22
  151. datahub/ingestion/source/state/checkpoint.py +2 -2
  152. datahub/ingestion/source/state/entity_removal_state.py +2 -1
  153. datahub/ingestion/source/state/stateful_ingestion_base.py +55 -45
  154. datahub/ingestion/source/state_provider/datahub_ingestion_checkpointing_provider.py +1 -1
  155. datahub/ingestion/source/state_provider/file_ingestion_checkpointing_provider.py +1 -1
  156. datahub/ingestion/source/superset.py +9 -9
  157. datahub/ingestion/source/tableau/tableau.py +14 -16
  158. datahub/ingestion/source/unity/azure_auth_config.py +15 -0
  159. datahub/ingestion/source/unity/config.py +51 -34
  160. datahub/ingestion/source/unity/connection.py +7 -1
  161. datahub/ingestion/source/unity/connection_test.py +1 -1
  162. datahub/ingestion/source/unity/proxy.py +216 -7
  163. datahub/ingestion/source/unity/proxy_types.py +91 -0
  164. datahub/ingestion/source/unity/source.py +29 -3
  165. datahub/ingestion/source/usage/clickhouse_usage.py +1 -1
  166. datahub/ingestion/source/usage/starburst_trino_usage.py +1 -1
  167. datahub/ingestion/source/usage/usage_common.py +5 -3
  168. datahub/ingestion/source_config/csv_enricher.py +7 -6
  169. datahub/ingestion/source_config/operation_config.py +7 -4
  170. datahub/ingestion/source_config/pulsar.py +11 -15
  171. datahub/ingestion/transformer/add_dataset_browse_path.py +1 -1
  172. datahub/ingestion/transformer/add_dataset_dataproduct.py +6 -5
  173. datahub/ingestion/transformer/add_dataset_ownership.py +3 -3
  174. datahub/ingestion/transformer/add_dataset_properties.py +2 -2
  175. datahub/ingestion/transformer/add_dataset_schema_tags.py +2 -2
  176. datahub/ingestion/transformer/add_dataset_schema_terms.py +2 -2
  177. datahub/ingestion/transformer/add_dataset_tags.py +3 -3
  178. datahub/ingestion/transformer/add_dataset_terms.py +3 -3
  179. datahub/ingestion/transformer/dataset_domain.py +3 -3
  180. datahub/ingestion/transformer/dataset_domain_based_on_tags.py +1 -1
  181. datahub/ingestion/transformer/extract_dataset_tags.py +1 -1
  182. datahub/ingestion/transformer/extract_ownership_from_tags.py +1 -1
  183. datahub/ingestion/transformer/mark_dataset_status.py +1 -1
  184. datahub/ingestion/transformer/pattern_cleanup_dataset_usage_user.py +1 -1
  185. datahub/ingestion/transformer/pattern_cleanup_ownership.py +1 -1
  186. datahub/ingestion/transformer/remove_dataset_ownership.py +1 -1
  187. datahub/ingestion/transformer/replace_external_url.py +2 -2
  188. datahub/ingestion/transformer/set_browse_path.py +1 -1
  189. datahub/ingestion/transformer/tags_to_terms.py +1 -1
  190. datahub/lite/duckdb_lite.py +1 -1
  191. datahub/lite/lite_util.py +2 -2
  192. datahub/metadata/_internal_schema_classes.py +62 -2
  193. datahub/metadata/com/linkedin/pegasus2avro/assertion/__init__.py +2 -0
  194. datahub/metadata/schema.avsc +271 -91
  195. datahub/metadata/schemas/ApplicationProperties.avsc +5 -2
  196. datahub/metadata/schemas/AssertionInfo.avsc +48 -5
  197. datahub/metadata/schemas/BusinessAttributeInfo.avsc +8 -4
  198. datahub/metadata/schemas/ChartInfo.avsc +12 -5
  199. datahub/metadata/schemas/ContainerProperties.avsc +12 -5
  200. datahub/metadata/schemas/CorpGroupEditableInfo.avsc +2 -1
  201. datahub/metadata/schemas/CorpGroupInfo.avsc +7 -3
  202. datahub/metadata/schemas/CorpUserInfo.avsc +5 -2
  203. datahub/metadata/schemas/CorpUserSettings.avsc +4 -2
  204. datahub/metadata/schemas/DashboardInfo.avsc +16 -4
  205. datahub/metadata/schemas/DataFlowInfo.avsc +11 -5
  206. datahub/metadata/schemas/DataHubPageModuleProperties.avsc +4 -2
  207. datahub/metadata/schemas/DataJobInfo.avsc +9 -4
  208. datahub/metadata/schemas/DataPlatformInfo.avsc +3 -1
  209. datahub/metadata/schemas/DataPlatformInstanceProperties.avsc +5 -2
  210. datahub/metadata/schemas/DataProductProperties.avsc +5 -2
  211. datahub/metadata/schemas/DataTypeInfo.avsc +5 -0
  212. datahub/metadata/schemas/DatasetKey.avsc +2 -1
  213. datahub/metadata/schemas/DatasetProperties.avsc +12 -5
  214. datahub/metadata/schemas/DomainProperties.avsc +7 -3
  215. datahub/metadata/schemas/EditableContainerProperties.avsc +2 -1
  216. datahub/metadata/schemas/EditableDashboardProperties.avsc +2 -1
  217. datahub/metadata/schemas/EditableDataFlowProperties.avsc +2 -1
  218. datahub/metadata/schemas/EditableDataJobProperties.avsc +2 -1
  219. datahub/metadata/schemas/EditableDatasetProperties.avsc +2 -1
  220. datahub/metadata/schemas/EditableERModelRelationshipProperties.avsc +2 -1
  221. datahub/metadata/schemas/EditableMLFeatureProperties.avsc +2 -1
  222. datahub/metadata/schemas/EditableMLFeatureTableProperties.avsc +2 -1
  223. datahub/metadata/schemas/EditableMLModelGroupProperties.avsc +2 -1
  224. datahub/metadata/schemas/EditableMLModelProperties.avsc +2 -1
  225. datahub/metadata/schemas/EditableNotebookProperties.avsc +2 -1
  226. datahub/metadata/schemas/EditableSchemaMetadata.avsc +5 -3
  227. datahub/metadata/schemas/EntityTypeInfo.avsc +5 -0
  228. datahub/metadata/schemas/GlobalTags.avsc +3 -2
  229. datahub/metadata/schemas/GlossaryNodeInfo.avsc +3 -1
  230. datahub/metadata/schemas/GlossaryTermInfo.avsc +3 -1
  231. datahub/metadata/schemas/InputFields.avsc +3 -2
  232. datahub/metadata/schemas/MLFeatureKey.avsc +3 -1
  233. datahub/metadata/schemas/MLFeatureTableKey.avsc +3 -1
  234. datahub/metadata/schemas/MLModelDeploymentKey.avsc +3 -1
  235. datahub/metadata/schemas/MLModelGroupKey.avsc +3 -1
  236. datahub/metadata/schemas/MLModelKey.avsc +3 -1
  237. datahub/metadata/schemas/MLModelProperties.avsc +4 -2
  238. datahub/metadata/schemas/MLPrimaryKeyKey.avsc +3 -1
  239. datahub/metadata/schemas/MetadataChangeEvent.avsc +124 -50
  240. datahub/metadata/schemas/NotebookInfo.avsc +5 -2
  241. datahub/metadata/schemas/Ownership.avsc +3 -2
  242. datahub/metadata/schemas/QuerySubjects.avsc +1 -1
  243. datahub/metadata/schemas/RoleProperties.avsc +3 -1
  244. datahub/metadata/schemas/SchemaFieldInfo.avsc +3 -1
  245. datahub/metadata/schemas/SchemaMetadata.avsc +3 -2
  246. datahub/metadata/schemas/StructuredPropertyDefinition.avsc +15 -4
  247. datahub/metadata/schemas/TagProperties.avsc +3 -1
  248. datahub/metadata/schemas/TestInfo.avsc +2 -1
  249. datahub/sdk/__init__.py +1 -0
  250. datahub/sdk/_all_entities.py +2 -0
  251. datahub/sdk/search_filters.py +68 -40
  252. datahub/sdk/tag.py +112 -0
  253. datahub/secret/datahub_secret_store.py +7 -4
  254. datahub/secret/file_secret_store.py +1 -1
  255. datahub/sql_parsing/schema_resolver.py +29 -0
  256. datahub/sql_parsing/sql_parsing_aggregator.py +15 -0
  257. datahub/sql_parsing/sqlglot_lineage.py +5 -2
  258. datahub/testing/check_sql_parser_result.py +2 -2
  259. datahub/utilities/ingest_utils.py +1 -1
  260. {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/WHEEL +0 -0
  261. {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/entry_points.txt +0 -0
  262. {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/licenses/LICENSE +0 -0
  263. {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/top_level.txt +0 -0
@@ -15,6 +15,7 @@ from typing import (
15
15
  )
16
16
 
17
17
  import pydantic
18
+ from pydantic import ValidationInfo, field_validator
18
19
  from pydantic.fields import Field
19
20
 
20
21
  import datahub.emitter.mce_builder as builder
@@ -226,10 +227,11 @@ class BaseUsageConfig(BaseTimeWindowConfig):
226
227
  default=True, description="Whether to ingest the top_n_queries."
227
228
  )
228
229
 
229
- @pydantic.validator("top_n_queries")
230
- def ensure_top_n_queries_is_not_too_big(cls, v: int, values: dict) -> int:
230
+ @field_validator("top_n_queries", mode="after")
231
+ @classmethod
232
+ def ensure_top_n_queries_is_not_too_big(cls, v: int, info: ValidationInfo) -> int:
231
233
  minimum_query_size = 20
232
-
234
+ values = info.data
233
235
  max_queries = int(values["queries_character_limit"] / minimum_query_size)
234
236
  if v > max_queries:
235
237
  raise ValueError(
@@ -1,6 +1,5 @@
1
- from typing import Any, Dict
2
-
3
1
  import pydantic
2
+ from pydantic import field_validator
4
3
 
5
4
  from datahub.configuration.common import ConfigModel
6
5
 
@@ -21,7 +20,8 @@ class CSVEnricherConfig(ConfigModel):
21
20
  description="Delimiter to use when parsing array fields (tags, terms and owners)",
22
21
  )
23
22
 
24
- @pydantic.validator("write_semantics")
23
+ @field_validator("write_semantics", mode="after")
24
+ @classmethod
25
25
  def validate_write_semantics(cls, write_semantics: str) -> str:
26
26
  if write_semantics.lower() not in {"patch", "override"}:
27
27
  raise ValueError(
@@ -31,9 +31,10 @@ class CSVEnricherConfig(ConfigModel):
31
31
  )
32
32
  return write_semantics
33
33
 
34
- @pydantic.validator("array_delimiter")
35
- def validator_diff(cls, array_delimiter: str, values: Dict[str, Any]) -> str:
36
- if array_delimiter == values["delimiter"]:
34
+ @field_validator("array_delimiter", mode="after")
35
+ @classmethod
36
+ def validator_diff(cls, array_delimiter: str, info: pydantic.ValidationInfo) -> str:
37
+ if array_delimiter == info.data["delimiter"]:
37
38
  raise ValueError(
38
39
  "array_delimiter and delimiter are the same. Please choose different delimiters."
39
40
  )
@@ -3,7 +3,7 @@ import logging
3
3
  from typing import Any, Dict, Optional
4
4
 
5
5
  import cachetools
6
- import pydantic
6
+ from pydantic import field_validator, model_validator
7
7
  from pydantic.fields import Field
8
8
 
9
9
  from datahub.configuration.common import ConfigModel
@@ -26,7 +26,8 @@ class OperationConfig(ConfigModel):
26
26
  description="Number between 1 to 31 for date of month (both inclusive). If not specified, defaults to Nothing and this field does not take affect.",
27
27
  )
28
28
 
29
- @pydantic.root_validator(pre=True)
29
+ @model_validator(mode="before")
30
+ @classmethod
30
31
  def lower_freq_configs_are_set(cls, values: Dict[str, Any]) -> Dict[str, Any]:
31
32
  lower_freq_profile_enabled = values.get("lower_freq_profile_enabled")
32
33
  profile_day_of_week = values.get("profile_day_of_week")
@@ -41,7 +42,8 @@ class OperationConfig(ConfigModel):
41
42
  )
42
43
  return values
43
44
 
44
- @pydantic.validator("profile_day_of_week")
45
+ @field_validator("profile_day_of_week", mode="after")
46
+ @classmethod
45
47
  def validate_profile_day_of_week(cls, v: Optional[int]) -> Optional[int]:
46
48
  profile_day_of_week = v
47
49
  if profile_day_of_week is None:
@@ -52,7 +54,8 @@ class OperationConfig(ConfigModel):
52
54
  )
53
55
  return profile_day_of_week
54
56
 
55
- @pydantic.validator("profile_date_of_month")
57
+ @field_validator("profile_date_of_month", mode="after")
58
+ @classmethod
56
59
  def validate_profile_date_of_month(cls, v: Optional[int]) -> Optional[int]:
57
60
  profile_date_of_month = v
58
61
  if profile_date_of_month is None:
@@ -3,7 +3,7 @@ from typing import Dict, List, Optional, Union
3
3
  from urllib.parse import urlparse
4
4
 
5
5
  import pydantic
6
- from pydantic import Field, validator
6
+ from pydantic import Field, model_validator
7
7
 
8
8
  from datahub.configuration.common import AllowDenyPattern
9
9
  from datahub.configuration.source_common import (
@@ -100,27 +100,23 @@ class PulsarSourceConfig(
100
100
  default_factory=dict, description="Placeholder for OpenId discovery document"
101
101
  )
102
102
 
103
- @validator("token")
104
- def ensure_only_issuer_or_token(
105
- cls, token: Optional[str], values: Dict[str, Optional[str]]
106
- ) -> Optional[str]:
107
- if token is not None and values.get("issuer_url") is not None:
103
+ @model_validator(mode="after")
104
+ def ensure_only_issuer_or_token(self) -> "PulsarSourceConfig":
105
+ if self.token is not None and self.issuer_url is not None:
108
106
  raise ValueError(
109
107
  "Expected only one authentication method, either issuer_url or token."
110
108
  )
111
- return token
112
-
113
- @validator("client_secret", always=True)
114
- def ensure_client_id_and_secret_for_issuer_url(
115
- cls, client_secret: Optional[str], values: Dict[str, Optional[str]]
116
- ) -> Optional[str]:
117
- if values.get("issuer_url") is not None and (
118
- client_secret is None or values.get("client_id") is None
109
+ return self
110
+
111
+ @model_validator(mode="after")
112
+ def ensure_client_id_and_secret_for_issuer_url(self) -> "PulsarSourceConfig":
113
+ if self.issuer_url is not None and (
114
+ self.client_secret is None or self.client_id is None
119
115
  ):
120
116
  raise ValueError(
121
117
  "Missing configuration: client_id and client_secret are mandatory when issuer_url is set."
122
118
  )
123
- return client_secret
119
+ return self
124
120
 
125
121
  @pydantic.field_validator("web_service_url", mode="after")
126
122
  @classmethod
@@ -32,7 +32,7 @@ class AddDatasetBrowsePathTransformer(DatasetBrowsePathsTransformer):
32
32
  def create(
33
33
  cls, config_dict: dict, ctx: PipelineContext
34
34
  ) -> "AddDatasetBrowsePathTransformer":
35
- config = AddDatasetBrowsePathConfig.parse_obj(config_dict)
35
+ config = AddDatasetBrowsePathConfig.model_validate(config_dict)
36
36
  return cls(config, ctx)
37
37
 
38
38
  @staticmethod
@@ -1,7 +1,7 @@
1
1
  import logging
2
2
  from typing import Callable, Dict, List, Optional, Union
3
3
 
4
- import pydantic
4
+ from pydantic import model_validator
5
5
 
6
6
  from datahub.configuration.common import ConfigModel, KeyValuePattern
7
7
  from datahub.configuration.import_resolver import pydantic_resolve_key
@@ -39,7 +39,7 @@ class AddDatasetDataProduct(DatasetDataproductTransformer):
39
39
 
40
40
  @classmethod
41
41
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetDataProduct":
42
- config = AddDatasetDataProductConfig.parse_obj(config_dict)
42
+ config = AddDatasetDataProductConfig.model_validate(config_dict)
43
43
  return cls(config, ctx)
44
44
 
45
45
  def transform_aspect(
@@ -116,7 +116,7 @@ class SimpleAddDatasetDataProduct(AddDatasetDataProduct):
116
116
  def create(
117
117
  cls, config_dict: dict, ctx: PipelineContext
118
118
  ) -> "SimpleAddDatasetDataProduct":
119
- config = SimpleDatasetDataProductConfig.parse_obj(config_dict)
119
+ config = SimpleDatasetDataProductConfig.model_validate(config_dict)
120
120
  return cls(config, ctx)
121
121
 
122
122
 
@@ -124,7 +124,8 @@ class PatternDatasetDataProductConfig(ConfigModel):
124
124
  dataset_to_data_product_urns_pattern: KeyValuePattern = KeyValuePattern.all()
125
125
  is_container: bool = False
126
126
 
127
- @pydantic.root_validator(pre=True)
127
+ @model_validator(mode="before")
128
+ @classmethod
128
129
  def validate_pattern_value(cls, values: Dict) -> Dict:
129
130
  rules = values["dataset_to_data_product_urns_pattern"]["rules"]
130
131
  for key, value in rules.items():
@@ -156,5 +157,5 @@ class PatternAddDatasetDataProduct(AddDatasetDataProduct):
156
157
  def create(
157
158
  cls, config_dict: dict, ctx: PipelineContext
158
159
  ) -> "PatternAddDatasetDataProduct":
159
- config = PatternDatasetDataProductConfig.parse_obj(config_dict)
160
+ config = PatternDatasetDataProductConfig.model_validate(config_dict)
160
161
  return cls(config, ctx)
@@ -55,7 +55,7 @@ class AddDatasetOwnership(OwnershipTransformer):
55
55
 
56
56
  @classmethod
57
57
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetOwnership":
58
- config = AddDatasetOwnershipConfig.parse_obj(config_dict)
58
+ config = AddDatasetOwnershipConfig.model_validate(config_dict)
59
59
  return cls(config, ctx)
60
60
 
61
61
  @staticmethod
@@ -209,7 +209,7 @@ class SimpleAddDatasetOwnership(AddDatasetOwnership):
209
209
  def create(
210
210
  cls, config_dict: dict, ctx: PipelineContext
211
211
  ) -> "SimpleAddDatasetOwnership":
212
- config = SimpleDatasetOwnershipConfig.parse_obj(config_dict)
212
+ config = SimpleDatasetOwnershipConfig.model_validate(config_dict)
213
213
  return cls(config, ctx)
214
214
 
215
215
 
@@ -247,5 +247,5 @@ class PatternAddDatasetOwnership(AddDatasetOwnership):
247
247
  def create(
248
248
  cls, config_dict: dict, ctx: PipelineContext
249
249
  ) -> "PatternAddDatasetOwnership":
250
- config = PatternDatasetOwnershipConfig.parse_obj(config_dict)
250
+ config = PatternDatasetOwnershipConfig.model_validate(config_dict)
251
251
  return cls(config, ctx)
@@ -50,7 +50,7 @@ class AddDatasetProperties(DatasetPropertiesTransformer):
50
50
 
51
51
  @classmethod
52
52
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetProperties":
53
- config = AddDatasetPropertiesConfig.parse_obj(config_dict)
53
+ config = AddDatasetPropertiesConfig.model_validate(config_dict)
54
54
  return cls(config, ctx)
55
55
 
56
56
  @staticmethod
@@ -144,5 +144,5 @@ class SimpleAddDatasetProperties(AddDatasetProperties):
144
144
  def create(
145
145
  cls, config_dict: dict, ctx: PipelineContext
146
146
  ) -> "SimpleAddDatasetProperties":
147
- config = SimpleAddDatasetPropertiesConfig.parse_obj(config_dict)
147
+ config = SimpleAddDatasetPropertiesConfig.model_validate(config_dict)
148
148
  return cls(config, ctx)
@@ -38,7 +38,7 @@ class AddDatasetSchemaTags(DatasetSchemaMetadataTransformer):
38
38
 
39
39
  @classmethod
40
40
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetSchemaTags":
41
- config = AddDatasetSchemaTagsConfig.parse_obj(config_dict)
41
+ config = AddDatasetSchemaTagsConfig.model_validate(config_dict)
42
42
  return cls(config, ctx)
43
43
 
44
44
  def extend_field(
@@ -142,5 +142,5 @@ class PatternAddDatasetSchemaTags(AddDatasetSchemaTags):
142
142
  def create(
143
143
  cls, config_dict: dict, ctx: PipelineContext
144
144
  ) -> "PatternAddDatasetSchemaTags":
145
- config = PatternDatasetTagsConfig.parse_obj(config_dict)
145
+ config = PatternDatasetTagsConfig.model_validate(config_dict)
146
146
  return cls(config, ctx)
@@ -39,7 +39,7 @@ class AddDatasetSchemaTerms(DatasetSchemaMetadataTransformer):
39
39
 
40
40
  @classmethod
41
41
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetSchemaTerms":
42
- config = AddDatasetSchemaTermsConfig.parse_obj(config_dict)
42
+ config = AddDatasetSchemaTermsConfig.model_validate(config_dict)
43
43
  return cls(config, ctx)
44
44
 
45
45
  def extend_field(
@@ -162,5 +162,5 @@ class PatternAddDatasetSchemaTerms(AddDatasetSchemaTerms):
162
162
  def create(
163
163
  cls, config_dict: dict, ctx: PipelineContext
164
164
  ) -> "PatternAddDatasetSchemaTerms":
165
- config = PatternDatasetTermsConfig.parse_obj(config_dict)
165
+ config = PatternDatasetTermsConfig.model_validate(config_dict)
166
166
  return cls(config, ctx)
@@ -41,7 +41,7 @@ class AddDatasetTags(DatasetTagsTransformer):
41
41
 
42
42
  @classmethod
43
43
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetTags":
44
- config = AddDatasetTagsConfig.parse_obj(config_dict)
44
+ config = AddDatasetTagsConfig.model_validate(config_dict)
45
45
  return cls(config, ctx)
46
46
 
47
47
  def transform_aspect(
@@ -104,7 +104,7 @@ class SimpleAddDatasetTags(AddDatasetTags):
104
104
 
105
105
  @classmethod
106
106
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "SimpleAddDatasetTags":
107
- config = SimpleDatasetTagConfig.parse_obj(config_dict)
107
+ config = SimpleDatasetTagConfig.model_validate(config_dict)
108
108
  return cls(config, ctx)
109
109
 
110
110
 
@@ -128,5 +128,5 @@ class PatternAddDatasetTags(AddDatasetTags):
128
128
 
129
129
  @classmethod
130
130
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "PatternAddDatasetTags":
131
- config = PatternDatasetTagsConfig.parse_obj(config_dict)
131
+ config = PatternDatasetTagsConfig.model_validate(config_dict)
132
132
  return cls(config, ctx)
@@ -39,7 +39,7 @@ class AddDatasetTerms(DatasetTermsTransformer):
39
39
 
40
40
  @classmethod
41
41
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetTerms":
42
- config = AddDatasetTermsConfig.parse_obj(config_dict)
42
+ config = AddDatasetTermsConfig.model_validate(config_dict)
43
43
  return cls(config, ctx)
44
44
 
45
45
  @staticmethod
@@ -120,7 +120,7 @@ class SimpleAddDatasetTerms(AddDatasetTerms):
120
120
 
121
121
  @classmethod
122
122
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "SimpleAddDatasetTerms":
123
- config = SimpleDatasetTermsConfig.parse_obj(config_dict)
123
+ config = SimpleDatasetTermsConfig.model_validate(config_dict)
124
124
  return cls(config, ctx)
125
125
 
126
126
 
@@ -147,5 +147,5 @@ class PatternAddDatasetTerms(AddDatasetTerms):
147
147
  def create(
148
148
  cls, config_dict: dict, ctx: PipelineContext
149
149
  ) -> "PatternAddDatasetTerms":
150
- config = PatternDatasetTermsConfig.parse_obj(config_dict)
150
+ config = PatternDatasetTermsConfig.model_validate(config_dict)
151
151
  return cls(config, ctx)
@@ -67,7 +67,7 @@ class AddDatasetDomain(DatasetDomainTransformer):
67
67
 
68
68
  @classmethod
69
69
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetDomain":
70
- config = AddDatasetDomainSemanticsConfig.parse_obj(config_dict)
70
+ config = AddDatasetDomainSemanticsConfig.model_validate(config_dict)
71
71
  return cls(config, ctx)
72
72
 
73
73
  @staticmethod
@@ -208,7 +208,7 @@ class SimpleAddDatasetDomain(AddDatasetDomain):
208
208
  def create(
209
209
  cls, config_dict: dict, ctx: PipelineContext
210
210
  ) -> "SimpleAddDatasetDomain":
211
- config = SimpleDatasetDomainSemanticsConfig.parse_obj(config_dict)
211
+ config = SimpleDatasetDomainSemanticsConfig.model_validate(config_dict)
212
212
  return cls(config, ctx)
213
213
 
214
214
 
@@ -238,5 +238,5 @@ class PatternAddDatasetDomain(AddDatasetDomain):
238
238
  def create(
239
239
  cls, config_dict: dict, ctx: PipelineContext
240
240
  ) -> "PatternAddDatasetDomain":
241
- config = PatternDatasetDomainSemanticsConfig.parse_obj(config_dict)
241
+ config = PatternDatasetDomainSemanticsConfig.model_validate(config_dict)
242
242
  return cls(config, ctx)
@@ -27,7 +27,7 @@ class DatasetTagDomainMapper(DatasetDomainTransformer):
27
27
  def create(
28
28
  cls, config_dict: dict, ctx: PipelineContext
29
29
  ) -> "DatasetTagDomainMapper":
30
- config = DatasetTagDomainMapperConfig.parse_obj(config_dict)
30
+ config = DatasetTagDomainMapperConfig.model_validate(config_dict)
31
31
  return cls(config, ctx)
32
32
 
33
33
  def transform_aspect(
@@ -29,7 +29,7 @@ class ExtractDatasetTags(DatasetTagsTransformer):
29
29
 
30
30
  @classmethod
31
31
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "ExtractDatasetTags":
32
- config = ExtractDatasetTagsConfig.parse_obj(config_dict)
32
+ config = ExtractDatasetTagsConfig.model_validate(config_dict)
33
33
  return cls(config, ctx)
34
34
 
35
35
  def _get_tags_to_add(self, entity_urn: str) -> List[TagAssociationClass]:
@@ -62,7 +62,7 @@ class ExtractOwnersFromTagsTransformer(DatasetTagsTransformer):
62
62
  def create(
63
63
  cls, config_dict: dict, ctx: PipelineContext
64
64
  ) -> "ExtractOwnersFromTagsTransformer":
65
- config = ExtractOwnersFromTagsConfig.parse_obj(config_dict)
65
+ config = ExtractOwnersFromTagsConfig.model_validate(config_dict)
66
66
  return cls(config, ctx)
67
67
 
68
68
  def get_owner_urn(self, owner_str: str) -> str:
@@ -24,7 +24,7 @@ class MarkDatasetStatus(DatasetStatusTransformer):
24
24
 
25
25
  @classmethod
26
26
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "MarkDatasetStatus":
27
- config = MarkDatasetStatusConfig.parse_obj(config_dict)
27
+ config = MarkDatasetStatusConfig.model_validate(config_dict)
28
28
  return cls(config, ctx)
29
29
 
30
30
  def transform_aspect(
@@ -38,7 +38,7 @@ class PatternCleanupDatasetUsageUser(DatasetUsageStatisticsTransformer):
38
38
  def create(
39
39
  cls, config_dict: dict, ctx: PipelineContext
40
40
  ) -> "PatternCleanupDatasetUsageUser":
41
- config = PatternCleanupDatasetUsageUserConfig.parse_obj(config_dict)
41
+ config = PatternCleanupDatasetUsageUserConfig.model_validate(config_dict)
42
42
  return cls(config, ctx)
43
43
 
44
44
  def transform_aspect(
@@ -37,7 +37,7 @@ class PatternCleanUpOwnership(OwnershipTransformer):
37
37
  def create(
38
38
  cls, config_dict: dict, ctx: PipelineContext
39
39
  ) -> "PatternCleanUpOwnership":
40
- config = PatternCleanUpOwnershipConfig.parse_obj(config_dict)
40
+ config = PatternCleanUpOwnershipConfig.model_validate(config_dict)
41
41
  return cls(config, ctx)
42
42
 
43
43
  def _get_current_owner_urns(self, entity_urn: str) -> Set[str]:
@@ -21,7 +21,7 @@ class SimpleRemoveDatasetOwnership(OwnershipTransformer):
21
21
  def create(
22
22
  cls, config_dict: dict, ctx: PipelineContext
23
23
  ) -> "SimpleRemoveDatasetOwnership":
24
- config = ClearDatasetOwnershipConfig.parse_obj(config_dict)
24
+ config = ClearDatasetOwnershipConfig.model_validate(config_dict)
25
25
  return cls(config, ctx)
26
26
 
27
27
  def transform_aspect(
@@ -47,7 +47,7 @@ class ReplaceExternalUrlDataset(DatasetPropertiesTransformer, ReplaceUrl):
47
47
  def create(
48
48
  cls, config_dict: dict, ctx: PipelineContext
49
49
  ) -> "ReplaceExternalUrlDataset":
50
- config = ReplaceExternalUrlConfig.parse_obj(config_dict)
50
+ config = ReplaceExternalUrlConfig.model_validate(config_dict)
51
51
  return cls(config, ctx)
52
52
 
53
53
  def transform_aspect(
@@ -97,7 +97,7 @@ class ReplaceExternalUrlContainer(ContainerPropertiesTransformer, ReplaceUrl):
97
97
  def create(
98
98
  cls, config_dict: dict, ctx: PipelineContext
99
99
  ) -> "ReplaceExternalUrlContainer":
100
- config = ReplaceExternalUrlConfig.parse_obj(config_dict)
100
+ config = ReplaceExternalUrlConfig.model_validate(config_dict)
101
101
  return cls(config, ctx)
102
102
 
103
103
  def transform_aspect(
@@ -42,7 +42,7 @@ class SetBrowsePathTransformer(BaseTransformer, SingleAspectTransformer):
42
42
  def create(
43
43
  cls, config_dict: dict, ctx: PipelineContext
44
44
  ) -> "SetBrowsePathTransformer":
45
- config = SetBrowsePathTransformerConfig.parse_obj(config_dict)
45
+ config = SetBrowsePathTransformerConfig.model_validate(config_dict)
46
46
  return cls(config, ctx)
47
47
 
48
48
  @staticmethod
@@ -32,7 +32,7 @@ class TagsToTermMapper(TagsToTermTransformer):
32
32
 
33
33
  @classmethod
34
34
  def create(cls, config_dict: dict, ctx: PipelineContext) -> "TagsToTermMapper":
35
- config = TagsToTermMapperConfig.parse_obj(config_dict)
35
+ config = TagsToTermMapperConfig.model_validate(config_dict)
36
36
  return cls(config, ctx)
37
37
 
38
38
  @staticmethod
@@ -42,7 +42,7 @@ logger = logging.getLogger(__name__)
42
42
  class DuckDBLite(DataHubLiteLocal[DuckDBLiteConfig]):
43
43
  @classmethod
44
44
  def create(cls, config_dict: dict) -> "DuckDBLite":
45
- config: DuckDBLiteConfig = DuckDBLiteConfig.parse_obj(config_dict)
45
+ config: DuckDBLiteConfig = DuckDBLiteConfig.model_validate(config_dict)
46
46
  return DuckDBLite(config)
47
47
 
48
48
  def __init__(self, config: DuckDBLiteConfig) -> None:
datahub/lite/lite_util.py CHANGED
@@ -92,7 +92,7 @@ class DataHubLiteWrapper(DataHubLiteLocal):
92
92
 
93
93
 
94
94
  def get_datahub_lite(config_dict: dict, read_only: bool = False) -> "DataHubLiteLocal":
95
- lite_local_config = LiteLocalConfig.parse_obj(config_dict)
95
+ lite_local_config = LiteLocalConfig.model_validate(config_dict)
96
96
 
97
97
  lite_type = lite_local_config.type
98
98
  try:
@@ -102,7 +102,7 @@ def get_datahub_lite(config_dict: dict, read_only: bool = False) -> "DataHubLite
102
102
  f"Failed to find a registered lite implementation for {lite_type}. Valid values are {[k for k in lite_registry.mapping]}"
103
103
  ) from e
104
104
 
105
- lite_specific_config = lite_class.get_config_class().parse_obj(
105
+ lite_specific_config = lite_class.get_config_class().model_validate(
106
106
  lite_local_config.config
107
107
  )
108
108
  lite = lite_class(lite_specific_config)
@@ -511,6 +511,7 @@ class AssertionInfoClass(_Aspect):
511
511
  source: Union[None, "AssertionSourceClass"]=None,
512
512
  lastUpdated: Union[None, "AuditStampClass"]=None,
513
513
  description: Union[None, str]=None,
514
+ note: Union[None, "AssertionNoteClass"]=None,
514
515
  ):
515
516
  super().__init__()
516
517
 
@@ -531,6 +532,7 @@ class AssertionInfoClass(_Aspect):
531
532
  self.source = source
532
533
  self.lastUpdated = lastUpdated
533
534
  self.description = description
535
+ self.note = note
534
536
 
535
537
  def _restore_defaults(self) -> None:
536
538
  self.customProperties = dict()
@@ -546,6 +548,7 @@ class AssertionInfoClass(_Aspect):
546
548
  self.source = self.RECORD_SCHEMA.fields_dict["source"].default
547
549
  self.lastUpdated = self.RECORD_SCHEMA.fields_dict["lastUpdated"].default
548
550
  self.description = self.RECORD_SCHEMA.fields_dict["description"].default
551
+ self.note = self.RECORD_SCHEMA.fields_dict["note"].default
549
552
 
550
553
 
551
554
  @property
@@ -570,7 +573,7 @@ class AssertionInfoClass(_Aspect):
570
573
 
571
574
  @property
572
575
  def type(self) -> Union[str, "AssertionTypeClass"]:
573
- """Type of assertion. Assertion types can evolve to span Datasets, Flows (Pipelines), Models, Features etc."""
576
+ """Type of assertion."""
574
577
  return self._inner_dict.get('type') # type: ignore
575
578
 
576
579
  @type.setter
@@ -682,6 +685,55 @@ class AssertionInfoClass(_Aspect):
682
685
  self._inner_dict['description'] = value
683
686
 
684
687
 
688
+ @property
689
+ def note(self) -> Union[None, "AssertionNoteClass"]:
690
+ """An optional note to give technical owners more context about the assertion, and how to troubleshoot it.
691
+ The UI will render this in markdown format."""
692
+ return self._inner_dict.get('note') # type: ignore
693
+
694
+ @note.setter
695
+ def note(self, value: Union[None, "AssertionNoteClass"]) -> None:
696
+ self._inner_dict['note'] = value
697
+
698
+
699
+ class AssertionNoteClass(DictWrapper):
700
+ # No docs available.
701
+
702
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.assertion.AssertionNote")
703
+ def __init__(self,
704
+ content: str,
705
+ lastModified: "AuditStampClass",
706
+ ):
707
+ super().__init__()
708
+
709
+ self.content = content
710
+ self.lastModified = lastModified
711
+
712
+ def _restore_defaults(self) -> None:
713
+ self.content = str()
714
+ self.lastModified = AuditStampClass._construct_with_defaults()
715
+
716
+
717
+ @property
718
+ def content(self) -> str:
719
+ """The note to give technical owners more context about the assertion, and how to troubleshoot it."""
720
+ return self._inner_dict.get('content') # type: ignore
721
+
722
+ @content.setter
723
+ def content(self, value: str) -> None:
724
+ self._inner_dict['content'] = value
725
+
726
+
727
+ @property
728
+ def lastModified(self) -> "AuditStampClass":
729
+ """The time at which the note was last modified."""
730
+ return self._inner_dict.get('lastModified') # type: ignore
731
+
732
+ @lastModified.setter
733
+ def lastModified(self, value: "AuditStampClass") -> None:
734
+ self._inner_dict['lastModified'] = value
735
+
736
+
685
737
  class AssertionResultClass(DictWrapper):
686
738
  """The result of running an assertion"""
687
739
 
@@ -1337,7 +1389,7 @@ class AssertionStdParametersClass(DictWrapper):
1337
1389
 
1338
1390
 
1339
1391
  class AssertionTypeClass(object):
1340
- # No docs available.
1392
+ """Type of assertion. Assertion types can evolve to span Datasets, Flows (Pipelines), Models, Features etc."""
1341
1393
 
1342
1394
  DATASET = "DATASET"
1343
1395
  """A single-dataset assertion.
@@ -12623,6 +12675,9 @@ class NotificationSinkTypeClass(object):
12623
12675
  EMAIL = "EMAIL"
12624
12676
  """Email target type."""
12625
12677
 
12678
+ TEAMS = "TEAMS"
12679
+ """Microsoft Teams target type."""
12680
+
12626
12681
 
12627
12682
 
12628
12683
  class EmailNotificationSettingsClass(DictWrapper):
@@ -20439,6 +20494,9 @@ class DataHubPageModuleTypeClass(object):
20439
20494
  PLATFORMS = "PLATFORMS"
20440
20495
  """Module displaying the platforms in an instance"""
20441
20496
 
20497
+ UNKNOWN = "UNKNOWN"
20498
+ """Unknown module type - this can occur with corrupted data or rolling back to versions without new modules"""
20499
+
20442
20500
 
20443
20501
 
20444
20502
  class DataHubPageModuleVisibilityClass(DictWrapper):
@@ -27742,6 +27800,7 @@ __SCHEMA_TYPES = {
27742
27800
  'com.linkedin.pegasus2avro.assertion.AssertionActionType': AssertionActionTypeClass,
27743
27801
  'com.linkedin.pegasus2avro.assertion.AssertionActions': AssertionActionsClass,
27744
27802
  'com.linkedin.pegasus2avro.assertion.AssertionInfo': AssertionInfoClass,
27803
+ 'com.linkedin.pegasus2avro.assertion.AssertionNote': AssertionNoteClass,
27745
27804
  'com.linkedin.pegasus2avro.assertion.AssertionResult': AssertionResultClass,
27746
27805
  'com.linkedin.pegasus2avro.assertion.AssertionResultError': AssertionResultErrorClass,
27747
27806
  'com.linkedin.pegasus2avro.assertion.AssertionResultErrorType': AssertionResultErrorTypeClass,
@@ -28268,6 +28327,7 @@ __SCHEMA_TYPES = {
28268
28327
  'AssertionActionType': AssertionActionTypeClass,
28269
28328
  'AssertionActions': AssertionActionsClass,
28270
28329
  'AssertionInfo': AssertionInfoClass,
28330
+ 'AssertionNote': AssertionNoteClass,
28271
28331
  'AssertionResult': AssertionResultClass,
28272
28332
  'AssertionResultError': AssertionResultErrorClass,
28273
28333
  'AssertionResultErrorType': AssertionResultErrorTypeClass,
@@ -11,6 +11,7 @@ from .....schema_classes import AssertionActionClass
11
11
  from .....schema_classes import AssertionActionTypeClass
12
12
  from .....schema_classes import AssertionActionsClass
13
13
  from .....schema_classes import AssertionInfoClass
14
+ from .....schema_classes import AssertionNoteClass
14
15
  from .....schema_classes import AssertionResultClass
15
16
  from .....schema_classes import AssertionResultErrorClass
16
17
  from .....schema_classes import AssertionResultErrorTypeClass
@@ -64,6 +65,7 @@ AssertionAction = AssertionActionClass
64
65
  AssertionActionType = AssertionActionTypeClass
65
66
  AssertionActions = AssertionActionsClass
66
67
  AssertionInfo = AssertionInfoClass
68
+ AssertionNote = AssertionNoteClass
67
69
  AssertionResult = AssertionResultClass
68
70
  AssertionResultError = AssertionResultErrorClass
69
71
  AssertionResultErrorType = AssertionResultErrorTypeClass