acryl-datahub 1.3.0.1rc9__py3-none-any.whl → 1.3.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of acryl-datahub might be problematic.
- {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/METADATA +2550 -2543
- {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/RECORD +263 -261
- datahub/_version.py +1 -1
- datahub/api/entities/common/serialized_value.py +2 -2
- datahub/api/entities/corpgroup/corpgroup.py +11 -6
- datahub/api/entities/corpuser/corpuser.py +11 -11
- datahub/api/entities/dataproduct/dataproduct.py +47 -27
- datahub/api/entities/dataset/dataset.py +32 -21
- datahub/api/entities/external/lake_formation_external_entites.py +5 -6
- datahub/api/entities/external/unity_catalog_external_entites.py +5 -7
- datahub/api/entities/forms/forms.py +16 -14
- datahub/api/entities/structuredproperties/structuredproperties.py +23 -16
- datahub/cli/check_cli.py +2 -2
- datahub/cli/config_utils.py +3 -3
- datahub/cli/lite_cli.py +9 -7
- datahub/cli/migrate.py +4 -4
- datahub/cli/quickstart_versioning.py +3 -3
- datahub/cli/specific/group_cli.py +1 -1
- datahub/cli/specific/structuredproperties_cli.py +1 -1
- datahub/cli/specific/user_cli.py +1 -1
- datahub/configuration/common.py +14 -2
- datahub/configuration/connection_resolver.py +2 -2
- datahub/configuration/git.py +47 -30
- datahub/configuration/import_resolver.py +2 -2
- datahub/configuration/kafka.py +4 -3
- datahub/configuration/time_window_config.py +26 -26
- datahub/configuration/validate_field_deprecation.py +2 -2
- datahub/configuration/validate_field_removal.py +2 -2
- datahub/configuration/validate_field_rename.py +2 -2
- datahub/configuration/validate_multiline_string.py +2 -1
- datahub/emitter/kafka_emitter.py +3 -1
- datahub/emitter/rest_emitter.py +2 -4
- datahub/ingestion/api/decorators.py +1 -1
- datahub/ingestion/api/report.py +1 -1
- datahub/ingestion/api/sink.py +1 -1
- datahub/ingestion/api/source.py +1 -1
- datahub/ingestion/glossary/datahub_classifier.py +11 -8
- datahub/ingestion/graph/client.py +5 -1
- datahub/ingestion/reporting/datahub_ingestion_run_summary_provider.py +1 -1
- datahub/ingestion/reporting/file_reporter.py +5 -4
- datahub/ingestion/run/pipeline.py +7 -6
- datahub/ingestion/run/pipeline_config.py +12 -14
- datahub/ingestion/run/sink_callback.py +1 -1
- datahub/ingestion/sink/datahub_rest.py +6 -4
- datahub/ingestion/source/abs/config.py +19 -19
- datahub/ingestion/source/abs/datalake_profiler_config.py +11 -13
- datahub/ingestion/source/abs/source.py +2 -2
- datahub/ingestion/source/aws/aws_common.py +1 -1
- datahub/ingestion/source/aws/glue.py +6 -4
- datahub/ingestion/source/aws/sagemaker.py +1 -1
- datahub/ingestion/source/azure/azure_common.py +8 -12
- datahub/ingestion/source/bigquery_v2/bigquery.py +1 -1
- datahub/ingestion/source/bigquery_v2/bigquery_config.py +43 -30
- datahub/ingestion/source/bigquery_v2/bigquery_queries.py +1 -1
- datahub/ingestion/source/cassandra/cassandra.py +1 -1
- datahub/ingestion/source/common/gcp_credentials_config.py +10 -10
- datahub/ingestion/source/data_lake_common/path_spec.py +85 -89
- datahub/ingestion/source/datahub/config.py +8 -8
- datahub/ingestion/source/datahub/datahub_source.py +1 -1
- datahub/ingestion/source/dbt/dbt_cloud.py +9 -3
- datahub/ingestion/source/dbt/dbt_common.py +39 -37
- datahub/ingestion/source/dbt/dbt_core.py +10 -12
- datahub/ingestion/source/debug/datahub_debug.py +1 -1
- datahub/ingestion/source/delta_lake/config.py +6 -4
- datahub/ingestion/source/dremio/dremio_api.py +212 -78
- datahub/ingestion/source/dremio/dremio_config.py +10 -6
- datahub/ingestion/source/dremio/dremio_entities.py +55 -39
- datahub/ingestion/source/dremio/dremio_profiling.py +14 -3
- datahub/ingestion/source/dremio/dremio_source.py +24 -26
- datahub/ingestion/source/dynamodb/dynamodb.py +1 -1
- datahub/ingestion/source/elastic_search.py +110 -32
- datahub/ingestion/source/excel/source.py +1 -1
- datahub/ingestion/source/feast.py +1 -1
- datahub/ingestion/source/file.py +5 -4
- datahub/ingestion/source/fivetran/config.py +17 -16
- datahub/ingestion/source/fivetran/fivetran.py +2 -2
- datahub/ingestion/source/gc/datahub_gc.py +1 -1
- datahub/ingestion/source/gcs/gcs_source.py +8 -10
- datahub/ingestion/source/ge_profiling_config.py +8 -5
- datahub/ingestion/source/grafana/grafana_api.py +2 -2
- datahub/ingestion/source/grafana/grafana_config.py +4 -3
- datahub/ingestion/source/grafana/grafana_source.py +1 -1
- datahub/ingestion/source/grafana/models.py +23 -5
- datahub/ingestion/source/hex/api.py +7 -5
- datahub/ingestion/source/hex/hex.py +4 -3
- datahub/ingestion/source/iceberg/iceberg.py +1 -1
- datahub/ingestion/source/iceberg/iceberg_common.py +5 -3
- datahub/ingestion/source/identity/azure_ad.py +1 -1
- datahub/ingestion/source/identity/okta.py +10 -10
- datahub/ingestion/source/kafka/kafka.py +1 -1
- datahub/ingestion/source/ldap.py +1 -1
- datahub/ingestion/source/looker/looker_common.py +7 -5
- datahub/ingestion/source/looker/looker_config.py +21 -20
- datahub/ingestion/source/looker/lookml_config.py +47 -47
- datahub/ingestion/source/metabase.py +8 -8
- datahub/ingestion/source/metadata/business_glossary.py +2 -2
- datahub/ingestion/source/metadata/lineage.py +13 -8
- datahub/ingestion/source/mlflow.py +1 -1
- datahub/ingestion/source/mode.py +6 -4
- datahub/ingestion/source/mongodb.py +4 -3
- datahub/ingestion/source/neo4j/neo4j_source.py +1 -1
- datahub/ingestion/source/nifi.py +17 -23
- datahub/ingestion/source/openapi.py +6 -8
- datahub/ingestion/source/powerbi/config.py +33 -32
- datahub/ingestion/source/powerbi/dataplatform_instance_resolver.py +2 -2
- datahub/ingestion/source/powerbi/powerbi.py +1 -1
- datahub/ingestion/source/powerbi_report_server/report_server.py +2 -2
- datahub/ingestion/source/powerbi_report_server/report_server_domain.py +8 -6
- datahub/ingestion/source/preset.py +8 -8
- datahub/ingestion/source/pulsar.py +1 -1
- datahub/ingestion/source/qlik_sense/data_classes.py +15 -8
- datahub/ingestion/source/qlik_sense/qlik_api.py +7 -7
- datahub/ingestion/source/qlik_sense/qlik_sense.py +1 -1
- datahub/ingestion/source/redshift/config.py +18 -20
- datahub/ingestion/source/redshift/redshift.py +2 -2
- datahub/ingestion/source/redshift/usage.py +23 -3
- datahub/ingestion/source/s3/config.py +83 -62
- datahub/ingestion/source/s3/datalake_profiler_config.py +11 -13
- datahub/ingestion/source/s3/source.py +8 -5
- datahub/ingestion/source/sac/sac.py +5 -4
- datahub/ingestion/source/salesforce.py +3 -2
- datahub/ingestion/source/schema/json_schema.py +2 -2
- datahub/ingestion/source/sigma/data_classes.py +3 -2
- datahub/ingestion/source/sigma/sigma.py +1 -1
- datahub/ingestion/source/sigma/sigma_api.py +7 -7
- datahub/ingestion/source/slack/slack.py +1 -1
- datahub/ingestion/source/snaplogic/snaplogic.py +1 -1
- datahub/ingestion/source/snowflake/snowflake_assertion.py +1 -1
- datahub/ingestion/source/snowflake/snowflake_config.py +35 -31
- datahub/ingestion/source/snowflake/snowflake_connection.py +35 -13
- datahub/ingestion/source/snowflake/snowflake_lineage_v2.py +3 -3
- datahub/ingestion/source/snowflake/snowflake_queries.py +28 -4
- datahub/ingestion/source/sql/athena.py +1 -1
- datahub/ingestion/source/sql/clickhouse.py +4 -2
- datahub/ingestion/source/sql/cockroachdb.py +1 -1
- datahub/ingestion/source/sql/druid.py +1 -1
- datahub/ingestion/source/sql/hana.py +1 -1
- datahub/ingestion/source/sql/hive.py +7 -5
- datahub/ingestion/source/sql/hive_metastore.py +1 -1
- datahub/ingestion/source/sql/mssql/source.py +13 -6
- datahub/ingestion/source/sql/mysql.py +1 -1
- datahub/ingestion/source/sql/oracle.py +17 -10
- datahub/ingestion/source/sql/postgres.py +2 -2
- datahub/ingestion/source/sql/presto.py +1 -1
- datahub/ingestion/source/sql/sql_config.py +8 -9
- datahub/ingestion/source/sql/sql_generic.py +1 -1
- datahub/ingestion/source/sql/teradata.py +1 -1
- datahub/ingestion/source/sql/trino.py +1 -1
- datahub/ingestion/source/sql/vertica.py +5 -4
- datahub/ingestion/source/sql_queries.py +174 -22
- datahub/ingestion/source/state/checkpoint.py +2 -2
- datahub/ingestion/source/state/entity_removal_state.py +2 -1
- datahub/ingestion/source/state/stateful_ingestion_base.py +55 -45
- datahub/ingestion/source/state_provider/datahub_ingestion_checkpointing_provider.py +1 -1
- datahub/ingestion/source/state_provider/file_ingestion_checkpointing_provider.py +1 -1
- datahub/ingestion/source/superset.py +9 -9
- datahub/ingestion/source/tableau/tableau.py +14 -16
- datahub/ingestion/source/unity/azure_auth_config.py +15 -0
- datahub/ingestion/source/unity/config.py +51 -34
- datahub/ingestion/source/unity/connection.py +7 -1
- datahub/ingestion/source/unity/connection_test.py +1 -1
- datahub/ingestion/source/unity/proxy.py +216 -7
- datahub/ingestion/source/unity/proxy_types.py +91 -0
- datahub/ingestion/source/unity/source.py +29 -3
- datahub/ingestion/source/usage/clickhouse_usage.py +1 -1
- datahub/ingestion/source/usage/starburst_trino_usage.py +1 -1
- datahub/ingestion/source/usage/usage_common.py +5 -3
- datahub/ingestion/source_config/csv_enricher.py +7 -6
- datahub/ingestion/source_config/operation_config.py +7 -4
- datahub/ingestion/source_config/pulsar.py +11 -15
- datahub/ingestion/transformer/add_dataset_browse_path.py +1 -1
- datahub/ingestion/transformer/add_dataset_dataproduct.py +6 -5
- datahub/ingestion/transformer/add_dataset_ownership.py +3 -3
- datahub/ingestion/transformer/add_dataset_properties.py +2 -2
- datahub/ingestion/transformer/add_dataset_schema_tags.py +2 -2
- datahub/ingestion/transformer/add_dataset_schema_terms.py +2 -2
- datahub/ingestion/transformer/add_dataset_tags.py +3 -3
- datahub/ingestion/transformer/add_dataset_terms.py +3 -3
- datahub/ingestion/transformer/dataset_domain.py +3 -3
- datahub/ingestion/transformer/dataset_domain_based_on_tags.py +1 -1
- datahub/ingestion/transformer/extract_dataset_tags.py +1 -1
- datahub/ingestion/transformer/extract_ownership_from_tags.py +1 -1
- datahub/ingestion/transformer/mark_dataset_status.py +1 -1
- datahub/ingestion/transformer/pattern_cleanup_dataset_usage_user.py +1 -1
- datahub/ingestion/transformer/pattern_cleanup_ownership.py +1 -1
- datahub/ingestion/transformer/remove_dataset_ownership.py +1 -1
- datahub/ingestion/transformer/replace_external_url.py +2 -2
- datahub/ingestion/transformer/set_browse_path.py +1 -1
- datahub/ingestion/transformer/tags_to_terms.py +1 -1
- datahub/lite/duckdb_lite.py +1 -1
- datahub/lite/lite_util.py +2 -2
- datahub/metadata/_internal_schema_classes.py +62 -2
- datahub/metadata/com/linkedin/pegasus2avro/assertion/__init__.py +2 -0
- datahub/metadata/schema.avsc +271 -91
- datahub/metadata/schemas/ApplicationProperties.avsc +5 -2
- datahub/metadata/schemas/AssertionInfo.avsc +48 -5
- datahub/metadata/schemas/BusinessAttributeInfo.avsc +8 -4
- datahub/metadata/schemas/ChartInfo.avsc +12 -5
- datahub/metadata/schemas/ContainerProperties.avsc +12 -5
- datahub/metadata/schemas/CorpGroupEditableInfo.avsc +2 -1
- datahub/metadata/schemas/CorpGroupInfo.avsc +7 -3
- datahub/metadata/schemas/CorpUserInfo.avsc +5 -2
- datahub/metadata/schemas/CorpUserSettings.avsc +4 -2
- datahub/metadata/schemas/DashboardInfo.avsc +16 -4
- datahub/metadata/schemas/DataFlowInfo.avsc +11 -5
- datahub/metadata/schemas/DataHubPageModuleProperties.avsc +4 -2
- datahub/metadata/schemas/DataJobInfo.avsc +9 -4
- datahub/metadata/schemas/DataPlatformInfo.avsc +3 -1
- datahub/metadata/schemas/DataPlatformInstanceProperties.avsc +5 -2
- datahub/metadata/schemas/DataProductProperties.avsc +5 -2
- datahub/metadata/schemas/DataTypeInfo.avsc +5 -0
- datahub/metadata/schemas/DatasetKey.avsc +2 -1
- datahub/metadata/schemas/DatasetProperties.avsc +12 -5
- datahub/metadata/schemas/DomainProperties.avsc +7 -3
- datahub/metadata/schemas/EditableContainerProperties.avsc +2 -1
- datahub/metadata/schemas/EditableDashboardProperties.avsc +2 -1
- datahub/metadata/schemas/EditableDataFlowProperties.avsc +2 -1
- datahub/metadata/schemas/EditableDataJobProperties.avsc +2 -1
- datahub/metadata/schemas/EditableDatasetProperties.avsc +2 -1
- datahub/metadata/schemas/EditableERModelRelationshipProperties.avsc +2 -1
- datahub/metadata/schemas/EditableMLFeatureProperties.avsc +2 -1
- datahub/metadata/schemas/EditableMLFeatureTableProperties.avsc +2 -1
- datahub/metadata/schemas/EditableMLModelGroupProperties.avsc +2 -1
- datahub/metadata/schemas/EditableMLModelProperties.avsc +2 -1
- datahub/metadata/schemas/EditableNotebookProperties.avsc +2 -1
- datahub/metadata/schemas/EditableSchemaMetadata.avsc +5 -3
- datahub/metadata/schemas/EntityTypeInfo.avsc +5 -0
- datahub/metadata/schemas/GlobalTags.avsc +3 -2
- datahub/metadata/schemas/GlossaryNodeInfo.avsc +3 -1
- datahub/metadata/schemas/GlossaryTermInfo.avsc +3 -1
- datahub/metadata/schemas/InputFields.avsc +3 -2
- datahub/metadata/schemas/MLFeatureKey.avsc +3 -1
- datahub/metadata/schemas/MLFeatureTableKey.avsc +3 -1
- datahub/metadata/schemas/MLModelDeploymentKey.avsc +3 -1
- datahub/metadata/schemas/MLModelGroupKey.avsc +3 -1
- datahub/metadata/schemas/MLModelKey.avsc +3 -1
- datahub/metadata/schemas/MLModelProperties.avsc +4 -2
- datahub/metadata/schemas/MLPrimaryKeyKey.avsc +3 -1
- datahub/metadata/schemas/MetadataChangeEvent.avsc +124 -50
- datahub/metadata/schemas/NotebookInfo.avsc +5 -2
- datahub/metadata/schemas/Ownership.avsc +3 -2
- datahub/metadata/schemas/QuerySubjects.avsc +1 -1
- datahub/metadata/schemas/RoleProperties.avsc +3 -1
- datahub/metadata/schemas/SchemaFieldInfo.avsc +3 -1
- datahub/metadata/schemas/SchemaMetadata.avsc +3 -2
- datahub/metadata/schemas/StructuredPropertyDefinition.avsc +15 -4
- datahub/metadata/schemas/TagProperties.avsc +3 -1
- datahub/metadata/schemas/TestInfo.avsc +2 -1
- datahub/sdk/__init__.py +1 -0
- datahub/sdk/_all_entities.py +2 -0
- datahub/sdk/search_filters.py +68 -40
- datahub/sdk/tag.py +112 -0
- datahub/secret/datahub_secret_store.py +7 -4
- datahub/secret/file_secret_store.py +1 -1
- datahub/sql_parsing/schema_resolver.py +29 -0
- datahub/sql_parsing/sql_parsing_aggregator.py +15 -0
- datahub/sql_parsing/sqlglot_lineage.py +5 -2
- datahub/testing/check_sql_parser_result.py +2 -2
- datahub/utilities/ingest_utils.py +1 -1
- {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/WHEEL +0 -0
- {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/entry_points.txt +0 -0
- {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/licenses/LICENSE +0 -0
- {acryl_datahub-1.3.0.1rc9.dist-info → acryl_datahub-1.3.1.1.dist-info}/top_level.txt +0 -0
datahub/ingestion/source/usage/usage_common.py CHANGED

@@ -15,6 +15,7 @@ from typing import (
 )
 
 import pydantic
+from pydantic import ValidationInfo, field_validator
 from pydantic.fields import Field
 
 import datahub.emitter.mce_builder as builder
@@ -226,10 +227,11 @@ class BaseUsageConfig(BaseTimeWindowConfig):
         default=True, description="Whether to ingest the top_n_queries."
     )
 
-    @
-
+    @field_validator("top_n_queries", mode="after")
+    @classmethod
+    def ensure_top_n_queries_is_not_too_big(cls, v: int, info: ValidationInfo) -> int:
         minimum_query_size = 20
-
+        values = info.data
         max_queries = int(values["queries_character_limit"] / minimum_query_size)
         if v > max_queries:
             raise ValueError(
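For readers following the pydantic v2 migration above: the new-style validator reads sibling fields from ValidationInfo.data instead of the old `values` argument. A minimal standalone sketch of that pattern, using a hypothetical ExampleUsageConfig model rather than the package's own class:

# Minimal sketch of the field_validator + ValidationInfo pattern above.
# ExampleUsageConfig and its defaults are hypothetical; they are not part of acryl-datahub.
from pydantic import BaseModel, ValidationInfo, field_validator


class ExampleUsageConfig(BaseModel):
    queries_character_limit: int = 24_000
    top_n_queries: int = 10

    @field_validator("top_n_queries", mode="after")
    @classmethod
    def ensure_top_n_queries_is_not_too_big(cls, v: int, info: ValidationInfo) -> int:
        # info.data exposes the fields validated before this one,
        # replacing the `values` dict passed to pydantic v1 validators.
        max_queries = info.data["queries_character_limit"] // 20
        if v > max_queries:
            raise ValueError(f"top_n_queries must be <= {max_queries}")
        return v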
datahub/ingestion/source_config/csv_enricher.py CHANGED

@@ -1,6 +1,5 @@
-from typing import Any, Dict
-
 import pydantic
+from pydantic import field_validator
 
 from datahub.configuration.common import ConfigModel
 
@@ -21,7 +20,8 @@ class CSVEnricherConfig(ConfigModel):
         description="Delimiter to use when parsing array fields (tags, terms and owners)",
     )
 
-    @
+    @field_validator("write_semantics", mode="after")
+    @classmethod
     def validate_write_semantics(cls, write_semantics: str) -> str:
         if write_semantics.lower() not in {"patch", "override"}:
             raise ValueError(
@@ -31,9 +31,10 @@ class CSVEnricherConfig(ConfigModel):
         )
         return write_semantics
 
-    @
-
-
+    @field_validator("array_delimiter", mode="after")
+    @classmethod
+    def validator_diff(cls, array_delimiter: str, info: pydantic.ValidationInfo) -> str:
+        if array_delimiter == info.data["delimiter"]:
             raise ValueError(
                 "array_delimiter and delimiter are the same. Please choose different delimiters."
             )
datahub/ingestion/source_config/operation_config.py CHANGED

@@ -3,7 +3,7 @@ import logging
 from typing import Any, Dict, Optional
 
 import cachetools
-import
+from pydantic import field_validator, model_validator
 from pydantic.fields import Field
 
 from datahub.configuration.common import ConfigModel
@@ -26,7 +26,8 @@ class OperationConfig(ConfigModel):
         description="Number between 1 to 31 for date of month (both inclusive). If not specified, defaults to Nothing and this field does not take affect.",
     )
 
-    @
+    @model_validator(mode="before")
+    @classmethod
     def lower_freq_configs_are_set(cls, values: Dict[str, Any]) -> Dict[str, Any]:
         lower_freq_profile_enabled = values.get("lower_freq_profile_enabled")
         profile_day_of_week = values.get("profile_day_of_week")
@@ -41,7 +42,8 @@ class OperationConfig(ConfigModel):
         )
         return values
 
-    @
+    @field_validator("profile_day_of_week", mode="after")
+    @classmethod
     def validate_profile_day_of_week(cls, v: Optional[int]) -> Optional[int]:
         profile_day_of_week = v
         if profile_day_of_week is None:
@@ -52,7 +54,8 @@ class OperationConfig(ConfigModel):
         )
         return profile_day_of_week
 
-    @
+    @field_validator("profile_date_of_month", mode="after")
+    @classmethod
     def validate_profile_date_of_month(cls, v: Optional[int]) -> Optional[int]:
         profile_date_of_month = v
         if profile_date_of_month is None:
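The @model_validator(mode="before") + @classmethod combination used above runs against the raw input mapping, so the validator body can keep working with a `values` dict. A minimal sketch of that shape, with a hypothetical ExampleProfilingConfig model that is not part of the package:

# Minimal sketch of the @model_validator(mode="before") pattern above.
from typing import Any, Dict, Optional

from pydantic import BaseModel, model_validator


class ExampleProfilingConfig(BaseModel):
    lower_freq_profile_enabled: bool = False
    profile_day_of_week: Optional[int] = None

    @model_validator(mode="before")
    @classmethod
    def lower_freq_configs_are_set(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        # mode="before" sees the raw input mapping, before field validation,
        # so .get() lookups from the v1-era validator body keep working.
        if values.get("lower_freq_profile_enabled") and values.get("profile_day_of_week") is None:
            raise ValueError(
                "profile_day_of_week must be set when lower_freq_profile_enabled is true."
            )
        return values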
datahub/ingestion/source_config/pulsar.py CHANGED

@@ -3,7 +3,7 @@ from typing import Dict, List, Optional, Union
 from urllib.parse import urlparse
 
 import pydantic
-from pydantic import Field,
+from pydantic import Field, model_validator
 
 from datahub.configuration.common import AllowDenyPattern
 from datahub.configuration.source_common import (
@@ -100,27 +100,23 @@ class PulsarSourceConfig(
         default_factory=dict, description="Placeholder for OpenId discovery document"
     )
 
-    @
-    def ensure_only_issuer_or_token(
-
-    ) -> Optional[str]:
-        if token is not None and values.get("issuer_url") is not None:
+    @model_validator(mode="after")
+    def ensure_only_issuer_or_token(self) -> "PulsarSourceConfig":
+        if self.token is not None and self.issuer_url is not None:
             raise ValueError(
                 "Expected only one authentication method, either issuer_url or token."
             )
-        return
-
-    @
-    def ensure_client_id_and_secret_for_issuer_url(
-
-
-        if values.get("issuer_url") is not None and (
-            client_secret is None or values.get("client_id") is None
+        return self
+
+    @model_validator(mode="after")
+    def ensure_client_id_and_secret_for_issuer_url(self) -> "PulsarSourceConfig":
+        if self.issuer_url is not None and (
+            self.client_secret is None or self.client_id is None
         ):
             raise ValueError(
                 "Missing configuration: client_id and client_secret are mandatory when issuer_url is set."
             )
-        return
+        return self
 
     @pydantic.field_validator("web_service_url", mode="after")
     @classmethod
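The pulsar config now uses @model_validator(mode="after"), which receives the fully constructed model and must return it, so cross-field checks read attributes on self. A minimal sketch with a hypothetical ExampleAuthConfig model, not the package's own class:

# Minimal sketch of the @model_validator(mode="after") pattern above.
from typing import Optional

from pydantic import BaseModel, model_validator


class ExampleAuthConfig(BaseModel):
    issuer_url: Optional[str] = None
    token: Optional[str] = None

    @model_validator(mode="after")
    def ensure_only_issuer_or_token(self) -> "ExampleAuthConfig":
        # In mode="after" the validator runs on the constructed instance,
        # so cross-field checks read attributes and must return the model.
        if self.token is not None and self.issuer_url is not None:
            raise ValueError("Provide either issuer_url or token, not both.")
        return self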
datahub/ingestion/transformer/add_dataset_browse_path.py CHANGED

@@ -32,7 +32,7 @@ class AddDatasetBrowsePathTransformer(DatasetBrowsePathsTransformer):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "AddDatasetBrowsePathTransformer":
-        config = AddDatasetBrowsePathConfig.
+        config = AddDatasetBrowsePathConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     @staticmethod
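The transformer hunks that follow all make the same one-line change: config parsing moves to pydantic v2's model_validate (the removed call is truncated in this diff; it was presumably the v1-style equivalent such as parse_obj). A sketch of the new call with a hypothetical config model, not one of the package's classes:

# Sketch of the model_validate call adopted across the transformers below.
from pydantic import BaseModel


class ExampleTransformerConfig(BaseModel):
    path_template: str = ""


config_dict = {"path_template": "/platform/DATASET_PARTS"}

# pydantic v2 replaces the v1-style parsing classmethods with model_validate:
config = ExampleTransformerConfig.model_validate(config_dict)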
datahub/ingestion/transformer/add_dataset_dataproduct.py CHANGED

@@ -1,7 +1,7 @@
 import logging
 from typing import Callable, Dict, List, Optional, Union
 
-import
+from pydantic import model_validator
 
 from datahub.configuration.common import ConfigModel, KeyValuePattern
 from datahub.configuration.import_resolver import pydantic_resolve_key
@@ -39,7 +39,7 @@ class AddDatasetDataProduct(DatasetDataproductTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetDataProduct":
-        config = AddDatasetDataProductConfig.
+        config = AddDatasetDataProductConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def transform_aspect(
@@ -116,7 +116,7 @@ class SimpleAddDatasetDataProduct(AddDatasetDataProduct):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "SimpleAddDatasetDataProduct":
-        config = SimpleDatasetDataProductConfig.
+        config = SimpleDatasetDataProductConfig.model_validate(config_dict)
         return cls(config, ctx)
 
 
@@ -124,7 +124,8 @@ class PatternDatasetDataProductConfig(ConfigModel):
     dataset_to_data_product_urns_pattern: KeyValuePattern = KeyValuePattern.all()
     is_container: bool = False
 
-    @
+    @model_validator(mode="before")
+    @classmethod
     def validate_pattern_value(cls, values: Dict) -> Dict:
         rules = values["dataset_to_data_product_urns_pattern"]["rules"]
         for key, value in rules.items():
@@ -156,5 +157,5 @@ class PatternAddDatasetDataProduct(AddDatasetDataProduct):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "PatternAddDatasetDataProduct":
-        config = PatternDatasetDataProductConfig.
+        config = PatternDatasetDataProductConfig.model_validate(config_dict)
         return cls(config, ctx)
datahub/ingestion/transformer/add_dataset_ownership.py CHANGED

@@ -55,7 +55,7 @@ class AddDatasetOwnership(OwnershipTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetOwnership":
-        config = AddDatasetOwnershipConfig.
+        config = AddDatasetOwnershipConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     @staticmethod
@@ -209,7 +209,7 @@ class SimpleAddDatasetOwnership(AddDatasetOwnership):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "SimpleAddDatasetOwnership":
-        config = SimpleDatasetOwnershipConfig.
+        config = SimpleDatasetOwnershipConfig.model_validate(config_dict)
         return cls(config, ctx)
 
 
@@ -247,5 +247,5 @@ class PatternAddDatasetOwnership(AddDatasetOwnership):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "PatternAddDatasetOwnership":
-        config = PatternDatasetOwnershipConfig.
+        config = PatternDatasetOwnershipConfig.model_validate(config_dict)
         return cls(config, ctx)
datahub/ingestion/transformer/add_dataset_properties.py CHANGED

@@ -50,7 +50,7 @@ class AddDatasetProperties(DatasetPropertiesTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetProperties":
-        config = AddDatasetPropertiesConfig.
+        config = AddDatasetPropertiesConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     @staticmethod
@@ -144,5 +144,5 @@ class SimpleAddDatasetProperties(AddDatasetProperties):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "SimpleAddDatasetProperties":
-        config = SimpleAddDatasetPropertiesConfig.
+        config = SimpleAddDatasetPropertiesConfig.model_validate(config_dict)
         return cls(config, ctx)
datahub/ingestion/transformer/add_dataset_schema_tags.py CHANGED

@@ -38,7 +38,7 @@ class AddDatasetSchemaTags(DatasetSchemaMetadataTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetSchemaTags":
-        config = AddDatasetSchemaTagsConfig.
+        config = AddDatasetSchemaTagsConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def extend_field(
@@ -142,5 +142,5 @@ class PatternAddDatasetSchemaTags(AddDatasetSchemaTags):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "PatternAddDatasetSchemaTags":
-        config = PatternDatasetTagsConfig.
+        config = PatternDatasetTagsConfig.model_validate(config_dict)
         return cls(config, ctx)
datahub/ingestion/transformer/add_dataset_schema_terms.py CHANGED

@@ -39,7 +39,7 @@ class AddDatasetSchemaTerms(DatasetSchemaMetadataTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetSchemaTerms":
-        config = AddDatasetSchemaTermsConfig.
+        config = AddDatasetSchemaTermsConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def extend_field(
@@ -162,5 +162,5 @@ class PatternAddDatasetSchemaTerms(AddDatasetSchemaTerms):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "PatternAddDatasetSchemaTerms":
-        config = PatternDatasetTermsConfig.
+        config = PatternDatasetTermsConfig.model_validate(config_dict)
         return cls(config, ctx)
datahub/ingestion/transformer/add_dataset_tags.py CHANGED

@@ -41,7 +41,7 @@ class AddDatasetTags(DatasetTagsTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetTags":
-        config = AddDatasetTagsConfig.
+        config = AddDatasetTagsConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def transform_aspect(
@@ -104,7 +104,7 @@ class SimpleAddDatasetTags(AddDatasetTags):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "SimpleAddDatasetTags":
-        config = SimpleDatasetTagConfig.
+        config = SimpleDatasetTagConfig.model_validate(config_dict)
         return cls(config, ctx)
 
 
@@ -128,5 +128,5 @@ class PatternAddDatasetTags(AddDatasetTags):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "PatternAddDatasetTags":
-        config = PatternDatasetTagsConfig.
+        config = PatternDatasetTagsConfig.model_validate(config_dict)
         return cls(config, ctx)
datahub/ingestion/transformer/add_dataset_terms.py CHANGED

@@ -39,7 +39,7 @@ class AddDatasetTerms(DatasetTermsTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetTerms":
-        config = AddDatasetTermsConfig.
+        config = AddDatasetTermsConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     @staticmethod
@@ -120,7 +120,7 @@ class SimpleAddDatasetTerms(AddDatasetTerms):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "SimpleAddDatasetTerms":
-        config = SimpleDatasetTermsConfig.
+        config = SimpleDatasetTermsConfig.model_validate(config_dict)
         return cls(config, ctx)
 
 
@@ -147,5 +147,5 @@ class PatternAddDatasetTerms(AddDatasetTerms):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "PatternAddDatasetTerms":
-        config = PatternDatasetTermsConfig.
+        config = PatternDatasetTermsConfig.model_validate(config_dict)
         return cls(config, ctx)
datahub/ingestion/transformer/dataset_domain.py CHANGED

@@ -67,7 +67,7 @@ class AddDatasetDomain(DatasetDomainTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetDomain":
-        config = AddDatasetDomainSemanticsConfig.
+        config = AddDatasetDomainSemanticsConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     @staticmethod
@@ -208,7 +208,7 @@ class SimpleAddDatasetDomain(AddDatasetDomain):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "SimpleAddDatasetDomain":
-        config = SimpleDatasetDomainSemanticsConfig.
+        config = SimpleDatasetDomainSemanticsConfig.model_validate(config_dict)
         return cls(config, ctx)
 
 
@@ -238,5 +238,5 @@ class PatternAddDatasetDomain(AddDatasetDomain):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "PatternAddDatasetDomain":
-        config = PatternDatasetDomainSemanticsConfig.
+        config = PatternDatasetDomainSemanticsConfig.model_validate(config_dict)
         return cls(config, ctx)
datahub/ingestion/transformer/dataset_domain_based_on_tags.py CHANGED

@@ -27,7 +27,7 @@ class DatasetTagDomainMapper(DatasetDomainTransformer):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "DatasetTagDomainMapper":
-        config = DatasetTagDomainMapperConfig.
+        config = DatasetTagDomainMapperConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def transform_aspect(
datahub/ingestion/transformer/extract_dataset_tags.py CHANGED

@@ -29,7 +29,7 @@ class ExtractDatasetTags(DatasetTagsTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "ExtractDatasetTags":
-        config = ExtractDatasetTagsConfig.
+        config = ExtractDatasetTagsConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def _get_tags_to_add(self, entity_urn: str) -> List[TagAssociationClass]:
datahub/ingestion/transformer/extract_ownership_from_tags.py CHANGED

@@ -62,7 +62,7 @@ class ExtractOwnersFromTagsTransformer(DatasetTagsTransformer):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "ExtractOwnersFromTagsTransformer":
-        config = ExtractOwnersFromTagsConfig.
+        config = ExtractOwnersFromTagsConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def get_owner_urn(self, owner_str: str) -> str:
datahub/ingestion/transformer/mark_dataset_status.py CHANGED

@@ -24,7 +24,7 @@ class MarkDatasetStatus(DatasetStatusTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "MarkDatasetStatus":
-        config = MarkDatasetStatusConfig.
+        config = MarkDatasetStatusConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def transform_aspect(
datahub/ingestion/transformer/pattern_cleanup_dataset_usage_user.py CHANGED

@@ -38,7 +38,7 @@ class PatternCleanupDatasetUsageUser(DatasetUsageStatisticsTransformer):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "PatternCleanupDatasetUsageUser":
-        config = PatternCleanupDatasetUsageUserConfig.
+        config = PatternCleanupDatasetUsageUserConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def transform_aspect(
datahub/ingestion/transformer/pattern_cleanup_ownership.py CHANGED

@@ -37,7 +37,7 @@ class PatternCleanUpOwnership(OwnershipTransformer):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "PatternCleanUpOwnership":
-        config = PatternCleanUpOwnershipConfig.
+        config = PatternCleanUpOwnershipConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def _get_current_owner_urns(self, entity_urn: str) -> Set[str]:
datahub/ingestion/transformer/remove_dataset_ownership.py CHANGED

@@ -21,7 +21,7 @@ class SimpleRemoveDatasetOwnership(OwnershipTransformer):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "SimpleRemoveDatasetOwnership":
-        config = ClearDatasetOwnershipConfig.
+        config = ClearDatasetOwnershipConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def transform_aspect(
datahub/ingestion/transformer/replace_external_url.py CHANGED

@@ -47,7 +47,7 @@ class ReplaceExternalUrlDataset(DatasetPropertiesTransformer, ReplaceUrl):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "ReplaceExternalUrlDataset":
-        config = ReplaceExternalUrlConfig.
+        config = ReplaceExternalUrlConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def transform_aspect(
@@ -97,7 +97,7 @@ class ReplaceExternalUrlContainer(ContainerPropertiesTransformer, ReplaceUrl):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "ReplaceExternalUrlContainer":
-        config = ReplaceExternalUrlConfig.
+        config = ReplaceExternalUrlConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     def transform_aspect(
datahub/ingestion/transformer/set_browse_path.py CHANGED

@@ -42,7 +42,7 @@ class SetBrowsePathTransformer(BaseTransformer, SingleAspectTransformer):
     def create(
         cls, config_dict: dict, ctx: PipelineContext
     ) -> "SetBrowsePathTransformer":
-        config = SetBrowsePathTransformerConfig.
+        config = SetBrowsePathTransformerConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     @staticmethod
datahub/ingestion/transformer/tags_to_terms.py CHANGED

@@ -32,7 +32,7 @@ class TagsToTermMapper(TagsToTermTransformer):
 
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "TagsToTermMapper":
-        config = TagsToTermMapperConfig.
+        config = TagsToTermMapperConfig.model_validate(config_dict)
         return cls(config, ctx)
 
     @staticmethod
datahub/lite/duckdb_lite.py CHANGED

@@ -42,7 +42,7 @@ logger = logging.getLogger(__name__)
 class DuckDBLite(DataHubLiteLocal[DuckDBLiteConfig]):
     @classmethod
     def create(cls, config_dict: dict) -> "DuckDBLite":
-        config: DuckDBLiteConfig = DuckDBLiteConfig.
+        config: DuckDBLiteConfig = DuckDBLiteConfig.model_validate(config_dict)
         return DuckDBLite(config)
 
     def __init__(self, config: DuckDBLiteConfig) -> None:
datahub/lite/lite_util.py CHANGED

@@ -92,7 +92,7 @@ class DataHubLiteWrapper(DataHubLiteLocal):
 
 
 def get_datahub_lite(config_dict: dict, read_only: bool = False) -> "DataHubLiteLocal":
-    lite_local_config = LiteLocalConfig.
+    lite_local_config = LiteLocalConfig.model_validate(config_dict)
 
     lite_type = lite_local_config.type
     try:
@@ -102,7 +102,7 @@ def get_datahub_lite(config_dict: dict, read_only: bool = False) -> "DataHubLite
             f"Failed to find a registered lite implementation for {lite_type}. Valid values are {[k for k in lite_registry.mapping]}"
         ) from e
 
-    lite_specific_config = lite_class.get_config_class().
+    lite_specific_config = lite_class.get_config_class().model_validate(
         lite_local_config.config
     )
     lite = lite_class(lite_specific_config)
datahub/metadata/_internal_schema_classes.py CHANGED

@@ -511,6 +511,7 @@ class AssertionInfoClass(_Aspect):
         source: Union[None, "AssertionSourceClass"]=None,
         lastUpdated: Union[None, "AuditStampClass"]=None,
         description: Union[None, str]=None,
+        note: Union[None, "AssertionNoteClass"]=None,
     ):
         super().__init__()
 
@@ -531,6 +532,7 @@ class AssertionInfoClass(_Aspect):
         self.source = source
         self.lastUpdated = lastUpdated
         self.description = description
+        self.note = note
 
     def _restore_defaults(self) -> None:
         self.customProperties = dict()
@@ -546,6 +548,7 @@ class AssertionInfoClass(_Aspect):
         self.source = self.RECORD_SCHEMA.fields_dict["source"].default
         self.lastUpdated = self.RECORD_SCHEMA.fields_dict["lastUpdated"].default
         self.description = self.RECORD_SCHEMA.fields_dict["description"].default
+        self.note = self.RECORD_SCHEMA.fields_dict["note"].default
 
 
     @property
@@ -570,7 +573,7 @@ class AssertionInfoClass(_Aspect):
 
     @property
     def type(self) -> Union[str, "AssertionTypeClass"]:
-        """Type of assertion.
+        """Type of assertion."""
         return self._inner_dict.get('type')  # type: ignore
 
     @type.setter
@@ -682,6 +685,55 @@ class AssertionInfoClass(_Aspect):
         self._inner_dict['description'] = value
 
 
+    @property
+    def note(self) -> Union[None, "AssertionNoteClass"]:
+        """An optional note to give technical owners more context about the assertion, and how to troubleshoot it.
+    The UI will render this in markdown format."""
+        return self._inner_dict.get('note')  # type: ignore
+
+    @note.setter
+    def note(self, value: Union[None, "AssertionNoteClass"]) -> None:
+        self._inner_dict['note'] = value
+
+
+class AssertionNoteClass(DictWrapper):
+    # No docs available.
+
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.assertion.AssertionNote")
+    def __init__(self,
+        content: str,
+        lastModified: "AuditStampClass",
+    ):
+        super().__init__()
+
+        self.content = content
+        self.lastModified = lastModified
+
+    def _restore_defaults(self) -> None:
+        self.content = str()
+        self.lastModified = AuditStampClass._construct_with_defaults()
+
+
+    @property
+    def content(self) -> str:
+        """The note to give technical owners more context about the assertion, and how to troubleshoot it."""
+        return self._inner_dict.get('content')  # type: ignore
+
+    @content.setter
+    def content(self, value: str) -> None:
+        self._inner_dict['content'] = value
+
+
+    @property
+    def lastModified(self) -> "AuditStampClass":
+        """The time at which the note was last modified."""
+        return self._inner_dict.get('lastModified')  # type: ignore
+
+    @lastModified.setter
+    def lastModified(self, value: "AuditStampClass") -> None:
+        self._inner_dict['lastModified'] = value
+
+
 class AssertionResultClass(DictWrapper):
     """The result of running an assertion"""
 
@@ -1337,7 +1389,7 @@ class AssertionStdParametersClass(DictWrapper):
 
 
 class AssertionTypeClass(object):
-
+    """Type of assertion. Assertion types can evolve to span Datasets, Flows (Pipelines), Models, Features etc."""
 
     DATASET = "DATASET"
     """A single-dataset assertion.
@@ -12623,6 +12675,9 @@ class NotificationSinkTypeClass(object):
     EMAIL = "EMAIL"
     """Email target type."""
 
+    TEAMS = "TEAMS"
+    """Microsoft Teams target type."""
+
 
 
 class EmailNotificationSettingsClass(DictWrapper):
@@ -20439,6 +20494,9 @@ class DataHubPageModuleTypeClass(object):
     PLATFORMS = "PLATFORMS"
     """Module displaying the platforms in an instance"""
 
+    UNKNOWN = "UNKNOWN"
+    """Unknown module type - this can occur with corrupted data or rolling back to versions without new modules"""
+
 
 
 class DataHubPageModuleVisibilityClass(DictWrapper):
@@ -27742,6 +27800,7 @@ __SCHEMA_TYPES = {
     'com.linkedin.pegasus2avro.assertion.AssertionActionType': AssertionActionTypeClass,
     'com.linkedin.pegasus2avro.assertion.AssertionActions': AssertionActionsClass,
     'com.linkedin.pegasus2avro.assertion.AssertionInfo': AssertionInfoClass,
+    'com.linkedin.pegasus2avro.assertion.AssertionNote': AssertionNoteClass,
     'com.linkedin.pegasus2avro.assertion.AssertionResult': AssertionResultClass,
     'com.linkedin.pegasus2avro.assertion.AssertionResultError': AssertionResultErrorClass,
     'com.linkedin.pegasus2avro.assertion.AssertionResultErrorType': AssertionResultErrorTypeClass,
@@ -28268,6 +28327,7 @@ __SCHEMA_TYPES = {
     'AssertionActionType': AssertionActionTypeClass,
     'AssertionActions': AssertionActionsClass,
     'AssertionInfo': AssertionInfoClass,
+    'AssertionNote': AssertionNoteClass,
     'AssertionResult': AssertionResultClass,
     'AssertionResultError': AssertionResultErrorClass,
     'AssertionResultErrorType': AssertionResultErrorTypeClass,
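A hedged usage sketch for the new AssertionNoteClass and the optional note field added to AssertionInfoClass above; it assumes the public re-export path datahub.metadata.schema_classes and an AuditStampClass(time=..., actor=...) constructor, as in the rest of the generated classes:

# Hedged usage sketch for the AssertionNote support shown above.
from datahub.metadata.schema_classes import (
    AssertionInfoClass,
    AssertionNoteClass,
    AssertionTypeClass,
    AuditStampClass,
)

note = AssertionNoteClass(
    content="Row-count check; re-run the upstream load job if this fails.",
    lastModified=AuditStampClass(time=1700000000000, actor="urn:li:corpuser:datahub"),
)

# `type` is assumed to be the only required AssertionInfoClass argument; `note` is optional.
info = AssertionInfoClass(
    type=AssertionTypeClass.DATASET,
    note=note,
)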
datahub/metadata/com/linkedin/pegasus2avro/assertion/__init__.py CHANGED

@@ -11,6 +11,7 @@ from .....schema_classes import AssertionActionClass
 from .....schema_classes import AssertionActionTypeClass
 from .....schema_classes import AssertionActionsClass
 from .....schema_classes import AssertionInfoClass
+from .....schema_classes import AssertionNoteClass
 from .....schema_classes import AssertionResultClass
 from .....schema_classes import AssertionResultErrorClass
 from .....schema_classes import AssertionResultErrorTypeClass
@@ -64,6 +65,7 @@ AssertionAction = AssertionActionClass
 AssertionActionType = AssertionActionTypeClass
 AssertionActions = AssertionActionsClass
 AssertionInfo = AssertionInfoClass
+AssertionNote = AssertionNoteClass
 AssertionResult = AssertionResultClass
 AssertionResultError = AssertionResultErrorClass
 AssertionResultErrorType = AssertionResultErrorTypeClass