acryl-datahub 1.1.1rc3__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in their public registries.
Potentially problematic release: this version of acryl-datahub might be problematic.
- {acryl_datahub-1.1.1rc3.dist-info → acryl_datahub-1.2.0.dist-info}/METADATA +2559 -2532
- {acryl_datahub-1.1.1rc3.dist-info → acryl_datahub-1.2.0.dist-info}/RECORD +226 -190
- {acryl_datahub-1.1.1rc3.dist-info → acryl_datahub-1.2.0.dist-info}/WHEEL +1 -1
- {acryl_datahub-1.1.1rc3.dist-info → acryl_datahub-1.2.0.dist-info}/entry_points.txt +2 -0
- datahub/_version.py +1 -1
- datahub/api/entities/dataset/dataset.py +2 -1
- datahub/api/entities/external/__init__.py +0 -0
- datahub/api/entities/external/external_entities.py +239 -0
- datahub/api/entities/external/external_tag.py +145 -0
- datahub/api/entities/external/lake_formation_external_entites.py +161 -0
- datahub/api/entities/external/restricted_text.py +247 -0
- datahub/api/entities/external/unity_catalog_external_entites.py +173 -0
- datahub/cli/check_cli.py +88 -7
- datahub/cli/cli_utils.py +63 -0
- datahub/cli/container_cli.py +5 -0
- datahub/cli/delete_cli.py +124 -27
- datahub/cli/docker_check.py +107 -12
- datahub/cli/docker_cli.py +149 -227
- datahub/cli/exists_cli.py +0 -2
- datahub/cli/get_cli.py +0 -2
- datahub/cli/iceberg_cli.py +5 -0
- datahub/cli/ingest_cli.py +12 -16
- datahub/cli/migrate.py +2 -0
- datahub/cli/put_cli.py +1 -4
- datahub/cli/quickstart_versioning.py +50 -7
- datahub/cli/specific/assertions_cli.py +0 -4
- datahub/cli/specific/datacontract_cli.py +0 -3
- datahub/cli/specific/dataproduct_cli.py +0 -11
- datahub/cli/specific/dataset_cli.py +1 -8
- datahub/cli/specific/forms_cli.py +0 -4
- datahub/cli/specific/group_cli.py +0 -2
- datahub/cli/specific/structuredproperties_cli.py +1 -4
- datahub/cli/specific/user_cli.py +0 -2
- datahub/cli/state_cli.py +0 -2
- datahub/cli/timeline_cli.py +0 -2
- datahub/emitter/response_helper.py +86 -1
- datahub/emitter/rest_emitter.py +71 -13
- datahub/entrypoints.py +4 -3
- datahub/ingestion/api/decorators.py +15 -3
- datahub/ingestion/api/report.py +332 -3
- datahub/ingestion/api/sink.py +3 -0
- datahub/ingestion/api/source.py +48 -44
- datahub/ingestion/autogenerated/__init__.py +0 -0
- datahub/ingestion/autogenerated/capability_summary.json +3449 -0
- datahub/ingestion/autogenerated/lineage.json +401 -0
- datahub/ingestion/autogenerated/lineage_helper.py +177 -0
- datahub/ingestion/extractor/schema_util.py +13 -4
- datahub/ingestion/glossary/classification_mixin.py +5 -0
- datahub/ingestion/graph/client.py +100 -15
- datahub/ingestion/graph/config.py +1 -0
- datahub/ingestion/reporting/datahub_ingestion_run_summary_provider.py +20 -10
- datahub/ingestion/run/pipeline.py +54 -2
- datahub/ingestion/sink/datahub_rest.py +13 -0
- datahub/ingestion/source/abs/source.py +1 -1
- datahub/ingestion/source/aws/aws_common.py +4 -0
- datahub/ingestion/source/aws/glue.py +489 -244
- datahub/ingestion/source/aws/tag_entities.py +292 -0
- datahub/ingestion/source/azure/azure_common.py +2 -2
- datahub/ingestion/source/bigquery_v2/bigquery.py +50 -23
- datahub/ingestion/source/bigquery_v2/bigquery_config.py +1 -1
- datahub/ingestion/source/bigquery_v2/bigquery_queries.py +1 -0
- datahub/ingestion/source/bigquery_v2/bigquery_schema_gen.py +2 -0
- datahub/ingestion/source/bigquery_v2/common.py +1 -1
- datahub/ingestion/source/bigquery_v2/profiler.py +4 -2
- datahub/ingestion/source/bigquery_v2/queries.py +3 -3
- datahub/ingestion/source/cassandra/cassandra.py +1 -1
- datahub/ingestion/source/cassandra/cassandra_profiling.py +6 -5
- datahub/ingestion/source/common/subtypes.py +45 -0
- datahub/ingestion/source/data_lake_common/object_store.py +115 -27
- datahub/ingestion/source/data_lake_common/path_spec.py +10 -21
- datahub/ingestion/source/datahub/config.py +11 -0
- datahub/ingestion/source/datahub/datahub_database_reader.py +187 -35
- datahub/ingestion/source/datahub/datahub_source.py +1 -1
- datahub/ingestion/source/dbt/dbt_cloud.py +10 -2
- datahub/ingestion/source/dbt/dbt_common.py +6 -2
- datahub/ingestion/source/dbt/dbt_core.py +3 -0
- datahub/ingestion/source/debug/__init__.py +0 -0
- datahub/ingestion/source/debug/datahub_debug.py +300 -0
- datahub/ingestion/source/dremio/dremio_api.py +114 -73
- datahub/ingestion/source/dremio/dremio_config.py +2 -0
- datahub/ingestion/source/dremio/dremio_reporting.py +23 -2
- datahub/ingestion/source/dremio/dremio_source.py +94 -81
- datahub/ingestion/source/dremio/dremio_sql_queries.py +82 -21
- datahub/ingestion/source/file.py +3 -0
- datahub/ingestion/source/fivetran/fivetran.py +34 -26
- datahub/ingestion/source/gcs/gcs_source.py +13 -2
- datahub/ingestion/source/ge_data_profiler.py +76 -28
- datahub/ingestion/source/ge_profiling_config.py +11 -0
- datahub/ingestion/source/hex/api.py +26 -1
- datahub/ingestion/source/iceberg/iceberg.py +3 -1
- datahub/ingestion/source/identity/azure_ad.py +1 -1
- datahub/ingestion/source/identity/okta.py +1 -14
- datahub/ingestion/source/kafka/kafka.py +16 -0
- datahub/ingestion/source/kafka_connect/sink_connectors.py +156 -47
- datahub/ingestion/source/kafka_connect/source_connectors.py +59 -4
- datahub/ingestion/source/looker/looker_source.py +1 -0
- datahub/ingestion/source/mlflow.py +11 -1
- datahub/ingestion/source/mock_data/__init__.py +0 -0
- datahub/ingestion/source/mock_data/datahub_mock_data.py +472 -0
- datahub/ingestion/source/mock_data/datahub_mock_data_report.py +12 -0
- datahub/ingestion/source/mock_data/table_naming_helper.py +91 -0
- datahub/ingestion/source/nifi.py +1 -1
- datahub/ingestion/source/openapi.py +12 -0
- datahub/ingestion/source/openapi_parser.py +56 -37
- datahub/ingestion/source/powerbi/powerbi.py +1 -5
- datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py +0 -1
- datahub/ingestion/source/powerbi_report_server/report_server.py +0 -23
- datahub/ingestion/source/preset.py +2 -2
- datahub/ingestion/source/qlik_sense/qlik_sense.py +1 -0
- datahub/ingestion/source/redshift/redshift.py +21 -1
- datahub/ingestion/source/redshift/usage.py +4 -3
- datahub/ingestion/source/s3/report.py +4 -2
- datahub/ingestion/source/s3/source.py +367 -115
- datahub/ingestion/source/sac/sac.py +3 -1
- datahub/ingestion/source/salesforce.py +6 -3
- datahub/ingestion/source/sigma/sigma.py +7 -1
- datahub/ingestion/source/slack/slack.py +2 -1
- datahub/ingestion/source/snowflake/snowflake_config.py +43 -7
- datahub/ingestion/source/snowflake/snowflake_queries.py +348 -82
- datahub/ingestion/source/snowflake/snowflake_summary.py +5 -0
- datahub/ingestion/source/snowflake/snowflake_usage_v2.py +8 -2
- datahub/ingestion/source/snowflake/snowflake_utils.py +2 -7
- datahub/ingestion/source/snowflake/snowflake_v2.py +33 -8
- datahub/ingestion/source/snowflake/stored_proc_lineage.py +143 -0
- datahub/ingestion/source/sql/athena.py +119 -11
- datahub/ingestion/source/sql/athena_properties_extractor.py +777 -0
- datahub/ingestion/source/sql/clickhouse.py +3 -1
- datahub/ingestion/source/sql/cockroachdb.py +0 -1
- datahub/ingestion/source/sql/hana.py +3 -1
- datahub/ingestion/source/sql/hive_metastore.py +3 -11
- datahub/ingestion/source/sql/mariadb.py +0 -1
- datahub/ingestion/source/sql/mssql/source.py +239 -34
- datahub/ingestion/source/sql/mysql.py +0 -1
- datahub/ingestion/source/sql/oracle.py +1 -1
- datahub/ingestion/source/sql/postgres.py +0 -1
- datahub/ingestion/source/sql/sql_common.py +121 -34
- datahub/ingestion/source/sql/sql_generic_profiler.py +2 -1
- datahub/ingestion/source/sql/teradata.py +997 -235
- datahub/ingestion/source/sql/vertica.py +10 -6
- datahub/ingestion/source/sql_queries.py +2 -2
- datahub/ingestion/source/state/stateful_ingestion_base.py +1 -1
- datahub/ingestion/source/superset.py +58 -3
- datahub/ingestion/source/tableau/tableau.py +58 -37
- datahub/ingestion/source/tableau/tableau_common.py +4 -2
- datahub/ingestion/source/tableau/tableau_constant.py +0 -4
- datahub/ingestion/source/unity/config.py +5 -0
- datahub/ingestion/source/unity/proxy.py +118 -0
- datahub/ingestion/source/unity/source.py +195 -17
- datahub/ingestion/source/unity/tag_entities.py +295 -0
- datahub/ingestion/source/usage/clickhouse_usage.py +4 -1
- datahub/ingestion/source/usage/starburst_trino_usage.py +3 -0
- datahub/ingestion/transformer/add_dataset_ownership.py +18 -2
- datahub/integrations/assertion/snowflake/compiler.py +4 -3
- datahub/metadata/_internal_schema_classes.py +1446 -559
- datahub/metadata/_urns/urn_defs.py +1721 -1553
- datahub/metadata/com/linkedin/pegasus2avro/application/__init__.py +19 -0
- datahub/metadata/com/linkedin/pegasus2avro/identity/__init__.py +2 -0
- datahub/metadata/com/linkedin/pegasus2avro/logical/__init__.py +15 -0
- datahub/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py +4 -0
- datahub/metadata/com/linkedin/pegasus2avro/module/__init__.py +27 -0
- datahub/metadata/com/linkedin/pegasus2avro/settings/global/__init__.py +4 -0
- datahub/metadata/com/linkedin/pegasus2avro/template/__init__.py +25 -0
- datahub/metadata/schema.avsc +18055 -17802
- datahub/metadata/schemas/ApplicationKey.avsc +31 -0
- datahub/metadata/schemas/ApplicationProperties.avsc +72 -0
- datahub/metadata/schemas/Applications.avsc +38 -0
- datahub/metadata/schemas/ChartKey.avsc +1 -0
- datahub/metadata/schemas/ContainerKey.avsc +1 -0
- datahub/metadata/schemas/ContainerProperties.avsc +8 -0
- datahub/metadata/schemas/CorpUserSettings.avsc +41 -0
- datahub/metadata/schemas/DashboardKey.avsc +1 -0
- datahub/metadata/schemas/DataFlowInfo.avsc +8 -0
- datahub/metadata/schemas/DataFlowKey.avsc +1 -0
- datahub/metadata/schemas/DataHubPageModuleKey.avsc +21 -0
- datahub/metadata/schemas/DataHubPageModuleProperties.avsc +200 -0
- datahub/metadata/schemas/DataHubPageTemplateKey.avsc +21 -0
- datahub/metadata/schemas/DataHubPageTemplateProperties.avsc +175 -0
- datahub/metadata/schemas/DataHubPolicyInfo.avsc +12 -1
- datahub/metadata/schemas/DataJobInfo.avsc +8 -0
- datahub/metadata/schemas/DataJobKey.avsc +1 -0
- datahub/metadata/schemas/DataProcessKey.avsc +8 -0
- datahub/metadata/schemas/DataProductKey.avsc +1 -0
- datahub/metadata/schemas/DataProductProperties.avsc +1 -1
- datahub/metadata/schemas/DatasetKey.avsc +11 -1
- datahub/metadata/schemas/ExecutionRequestInput.avsc +5 -0
- datahub/metadata/schemas/GlobalSettingsInfo.avsc +62 -0
- datahub/metadata/schemas/GlossaryTermKey.avsc +1 -0
- datahub/metadata/schemas/IcebergWarehouseInfo.avsc +8 -0
- datahub/metadata/schemas/LogicalParent.avsc +140 -0
- datahub/metadata/schemas/MLFeatureKey.avsc +1 -0
- datahub/metadata/schemas/MLFeatureTableKey.avsc +1 -0
- datahub/metadata/schemas/MLModelDeploymentKey.avsc +8 -0
- datahub/metadata/schemas/MLModelGroupKey.avsc +9 -0
- datahub/metadata/schemas/MLModelKey.avsc +9 -0
- datahub/metadata/schemas/MLPrimaryKeyKey.avsc +1 -0
- datahub/metadata/schemas/MetadataChangeEvent.avsc +20 -1
- datahub/metadata/schemas/NotebookKey.avsc +1 -0
- datahub/metadata/schemas/QuerySubjects.avsc +1 -12
- datahub/metadata/schemas/SchemaFieldKey.avsc +2 -1
- datahub/sdk/__init__.py +6 -0
- datahub/sdk/_all_entities.py +11 -0
- datahub/sdk/_shared.py +118 -1
- datahub/sdk/chart.py +315 -0
- datahub/sdk/container.py +7 -0
- datahub/sdk/dashboard.py +432 -0
- datahub/sdk/dataflow.py +309 -0
- datahub/sdk/datajob.py +367 -0
- datahub/sdk/dataset.py +8 -2
- datahub/sdk/entity_client.py +90 -2
- datahub/sdk/lineage_client.py +683 -82
- datahub/sdk/main_client.py +46 -16
- datahub/sdk/mlmodel.py +101 -38
- datahub/sdk/mlmodelgroup.py +7 -0
- datahub/sdk/search_client.py +4 -3
- datahub/specific/chart.py +1 -1
- datahub/specific/dataproduct.py +4 -0
- datahub/sql_parsing/sql_parsing_aggregator.py +29 -17
- datahub/sql_parsing/sqlglot_lineage.py +62 -13
- datahub/telemetry/telemetry.py +17 -11
- datahub/testing/sdk_v2_helpers.py +7 -1
- datahub/upgrade/upgrade.py +46 -13
- datahub/utilities/server_config_util.py +8 -0
- datahub/utilities/sqlalchemy_query_combiner.py +5 -2
- datahub/utilities/stats_collections.py +4 -0
- {acryl_datahub-1.1.1rc3.dist-info → acryl_datahub-1.2.0.dist-info}/licenses/LICENSE +0 -0
- {acryl_datahub-1.1.1rc3.dist-info → acryl_datahub-1.2.0.dist-info}/top_level.txt +0 -0
datahub/sdk/dataflow.py
ADDED
@@ -0,0 +1,309 @@
+from __future__ import annotations
+
+import warnings
+from datetime import datetime
+from typing import Dict, Optional, Type, Union
+
+from typing_extensions import Self
+
+import datahub.metadata.schema_classes as models
+from datahub.cli.cli_utils import first_non_null
+from datahub.emitter.mce_builder import DEFAULT_ENV
+from datahub.errors import (
+    IngestionAttributionWarning,
+)
+from datahub.metadata.urns import DataFlowUrn, Urn
+from datahub.sdk._attribution import is_ingestion_attribution
+from datahub.sdk._shared import (
+    DomainInputType,
+    HasContainer,
+    HasDomain,
+    HasInstitutionalMemory,
+    HasOwnership,
+    HasPlatformInstance,
+    HasStructuredProperties,
+    HasSubtype,
+    HasTags,
+    HasTerms,
+    LinksInputType,
+    OwnersInputType,
+    ParentContainerInputType,
+    StructuredPropertyInputType,
+    TagsInputType,
+    TermsInputType,
+    make_time_stamp,
+    parse_time_stamp,
+)
+from datahub.sdk.entity import Entity, ExtraAspectsType
+from datahub.utilities.sentinels import Unset, unset
+
+
+class DataFlow(
+    HasPlatformInstance,
+    HasSubtype,
+    HasOwnership,
+    HasContainer,
+    HasInstitutionalMemory,
+    HasTags,
+    HasTerms,
+    HasDomain,
+    HasStructuredProperties,
+    Entity,
+):
+    """Represents a dataflow in DataHub.
+    A dataflow represents a data pipeline, such as an Airflow DAG or a Spark application.
+    This class provides methods for managing dataflow metadata including
+    ownership, tags, terms, domains, and other aspects.
+    """
+
+    __slots__ = ()
+
+    @classmethod
+    def get_urn_type(cls) -> Type[DataFlowUrn]:
+        """Get the URN type for dataflows.
+        Returns:
+            The DataFlowUrn class.
+        """
+        return DataFlowUrn
+
+    def __init__(
+        self,
+        *,
+        # Identity.
+        name: str,
+        platform: str,
+        display_name: Optional[str] = None,
+        platform_instance: Optional[str] = None,
+        env: str = DEFAULT_ENV,
+        # Dataflow properties.
+        description: Optional[str] = None,
+        external_url: Optional[str] = None,
+        custom_properties: Optional[Dict[str, str]] = None,
+        created: Optional[datetime] = None,
+        last_modified: Optional[datetime] = None,
+        # Standard aspects.
+        subtype: Optional[str] = None,
+        owners: Optional[OwnersInputType] = None,
+        links: Optional[LinksInputType] = None,
+        tags: Optional[TagsInputType] = None,
+        terms: Optional[TermsInputType] = None,
+        domain: Optional[DomainInputType] = None,
+        parent_container: ParentContainerInputType | Unset = unset,
+        structured_properties: Optional[StructuredPropertyInputType] = None,
+        extra_aspects: ExtraAspectsType = None,
+    ):
+        """Initialize a new DataFlow instance.
+        Args:
+            platform: The orchestration platform this dataflow belongs to (e.g. "airflow", "spark").
+            name: The name of the dataflow.
+            platform_instance: Optional platform instance identifier.
+            env: The environment this dataflow belongs to (default: DEFAULT_ENV).
+            description: Optional description of the dataflow.
+            display_name: Optional display name for the dataflow.
+            external_url: Optional URL to external documentation or source.
+            custom_properties: Optional dictionary of custom properties.
+            created: Optional creation timestamp.
+            last_modified: Optional last modification timestamp.
+            subtype: Optional subtype of the dataflow.
+            owners: Optional list of owners.
+            links: Optional list of links.
+            tags: Optional list of tags.
+            terms: Optional list of glossary terms.
+            domain: Optional domain this dataflow belongs to.
+            extra_aspects: Optional list of additional aspects.
+            parent_container: Optional parent container for the dataflow.
+        """
+        urn = DataFlowUrn.create_from_ids(
+            orchestrator=platform,
+            flow_id=name,
+            env=env,
+            platform_instance=platform_instance,
+        )
+        super().__init__(urn)
+        self._set_extra_aspects(extra_aspects)
+
+        self._set_platform_instance(urn.orchestrator, platform_instance)
+
+        # Initialize DataFlowInfoClass directly with name
+        self._setdefault_aspect(models.DataFlowInfoClass(name=display_name or name))
+        self._ensure_dataflow_props().env = env
+
+        if description is not None:
+            self.set_description(description)
+        if display_name is not None:
+            self.set_display_name(display_name)
+        if external_url is not None:
+            self.set_external_url(external_url)
+        if custom_properties is not None:
+            self.set_custom_properties(custom_properties)
+        if created is not None:
+            self.set_created(created)
+        if last_modified is not None:
+            self.set_last_modified(last_modified)
+        if subtype is not None:
+            self.set_subtype(subtype)
+        if owners is not None:
+            self.set_owners(owners)
+        if links is not None:
+            self.set_links(links)
+        if tags is not None:
+            self.set_tags(tags)
+        if terms is not None:
+            self.set_terms(terms)
+        if domain is not None:
+            self.set_domain(domain)
+        if parent_container is not unset:
+            self._set_container(parent_container)
+        if structured_properties is not None:
+            for key, value in structured_properties.items():
+                self.set_structured_property(property_urn=key, values=value)
+
+    @classmethod
+    def _new_from_graph(cls, urn: Urn, current_aspects: models.AspectBag) -> Self:
+        assert isinstance(urn, DataFlowUrn)
+        entity = cls(
+            platform=urn.orchestrator,
+            name=urn.flow_id,
+        )
+        return entity._init_from_graph(current_aspects)
+
+    @property
+    def urn(self) -> DataFlowUrn:
+        return self._urn  # type: ignore
+
+    def _ensure_dataflow_props(self) -> models.DataFlowInfoClass:
+        props = self._get_aspect(models.DataFlowInfoClass)
+        if props is None:
+            # Use name from URN as fallback
+            props = models.DataFlowInfoClass(name=self.urn.flow_id)
+            self._set_aspect(props)
+        return props
+
+    def _get_editable_props(self) -> Optional[models.EditableDataFlowPropertiesClass]:
+        return self._get_aspect(models.EditableDataFlowPropertiesClass)
+
+    def _ensure_editable_props(self) -> models.EditableDataFlowPropertiesClass:
+        # Note that most of the fields in this aspect are not used.
+        # The only one that's relevant for us is the description.
+        return self._setdefault_aspect(models.EditableDataFlowPropertiesClass())
+
+    @property
+    def description(self) -> Optional[str]:
+        """Get the description of the dataflow.
+        Returns:
+            The description if set, None otherwise.
+        """
+        editable_props = self._get_editable_props()
+        return first_non_null(
+            [
+                editable_props.description if editable_props is not None else None,
+                self._ensure_dataflow_props().description,
+            ]
+        )
+
+    def set_description(self, description: str) -> None:
+        """Set the description of the dataflow.
+        Args:
+            description: The description to set.
+        Note:
+            If called during ingestion, this will warn if overwriting
+            a non-ingestion description.
+        """
+        if is_ingestion_attribution():
+            editable_props = self._get_editable_props()
+            if editable_props is not None and editable_props.description is not None:
+                warnings.warn(
+                    "Overwriting non-ingestion description from ingestion is an anti-pattern.",
+                    category=IngestionAttributionWarning,
+                    stacklevel=2,
+                )
+                # Force the ingestion description to show up.
+                editable_props.description = None
+
+            self._ensure_dataflow_props().description = description
+        else:
+            self._ensure_editable_props().description = description
+
+    @property
+    def name(self) -> str:
+        """Get the name of the dataflow.
+        Returns:
+            The name of the dataflow.
+        """
+        return self.urn.flow_id
+
+    @property
+    def display_name(self) -> Optional[str]:
+        """Get the display name of the dataflow.
+        Returns:
+            The display name if set, None otherwise.
+        """
+        return self._ensure_dataflow_props().name
+
+    def set_display_name(self, display_name: str) -> None:
+        """Set the display name of the dataflow.
+        Args:
+            display_name: The display name to set.
+        """
+        self._ensure_dataflow_props().name = display_name
+
+    @property
+    def external_url(self) -> Optional[str]:
+        """Get the external URL of the dataflow.
+        Returns:
+            The external URL if set, None otherwise.
+        """
+        return self._ensure_dataflow_props().externalUrl
+
+    def set_external_url(self, external_url: str) -> None:
+        """Set the external URL of the dataflow.
+        Args:
+            external_url: The external URL to set.
+        """
+        self._ensure_dataflow_props().externalUrl = external_url
+
+    @property
+    def custom_properties(self) -> Dict[str, str]:
+        """Get the custom properties of the dataflow.
+        Returns:
+            Dictionary of custom properties.
+        """
+        return self._ensure_dataflow_props().customProperties
+
+    def set_custom_properties(self, custom_properties: Dict[str, str]) -> None:
+        """Set the custom properties of the dataflow.
+        Args:
+            custom_properties: Dictionary of custom properties to set.
+        """
+        self._ensure_dataflow_props().customProperties = custom_properties
+
+    @property
+    def created(self) -> Optional[datetime]:
+        """Get the creation timestamp of the dataflow.
+        Returns:
+            The creation timestamp if set, None otherwise.
+        """
+        return parse_time_stamp(self._ensure_dataflow_props().created)
+
+    def set_created(self, created: datetime) -> None:
+        """Set the creation timestamp of the dataflow.
+        Args:
+            created: The creation timestamp to set.
+        """
+        self._ensure_dataflow_props().created = make_time_stamp(created)
+
+    @property
+    def last_modified(self) -> Optional[datetime]:
+        """Get the last modification timestamp of the dataflow.
+        Returns:
+            The last modification timestamp if set, None otherwise.
+        """
+        return parse_time_stamp(self._ensure_dataflow_props().lastModified)
+
+    def set_last_modified(self, last_modified: datetime) -> None:
+        self._ensure_dataflow_props().lastModified = make_time_stamp(last_modified)
+
+    @property
+    def env(self) -> Optional[Union[str, models.FabricTypeClass]]:
+        """Get the environment of the dataflow."""
+        return self._ensure_dataflow_props().env
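For orientation, here is a minimal usage sketch (not part of the diff) built against the DataFlow constructor and accessors added above. The platform, names, and property values are invented examples, and the URN shape in the comment is inferred from the platform-instance prefix handling visible in datajob.py below.

from datetime import datetime

from datahub.sdk.dataflow import DataFlow

# A hypothetical Airflow DAG; platform/name/env feed DataFlowUrn.create_from_ids().
flow = DataFlow(
    platform="airflow",
    name="daily_sales_refresh",
    platform_instance="prod-airflow",  # optional; folded into the URN's flow_id
    env="PROD",
    description="Refreshes the sales mart every morning.",
    custom_properties={"team": "data-platform"},
    created=datetime(2025, 1, 1),
)

print(flow.urn)          # e.g. urn:li:dataFlow:(airflow,prod-airflow.daily_sales_refresh,PROD)
print(flow.description)  # read back across the editable and ingestion-owned aspects

Note that set_description() routes writes to either DataFlowInfo or EditableDataFlowProperties depending on is_ingestion_attribution(), which is why the description property reads both aspects via first_non_null.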
datahub/sdk/datajob.py
ADDED
@@ -0,0 +1,367 @@
+from __future__ import annotations
+
+import warnings
+from datetime import datetime
+from typing import Dict, List, Optional, Type
+
+from typing_extensions import Self
+
+import datahub.emitter.mce_builder as builder
+import datahub.metadata.schema_classes as models
+from datahub.cli.cli_utils import first_non_null
+from datahub.errors import IngestionAttributionWarning
+from datahub.metadata.urns import (
+    DataFlowUrn,
+    DataJobUrn,
+    DatasetUrn,
+    Urn,
+)
+from datahub.sdk._attribution import is_ingestion_attribution
+from datahub.sdk._shared import (
+    DataflowUrnOrStr,
+    DatasetUrnOrStr,
+    DomainInputType,
+    HasContainer,
+    HasDomain,
+    HasInstitutionalMemory,
+    HasOwnership,
+    HasPlatformInstance,
+    HasStructuredProperties,
+    HasSubtype,
+    HasTags,
+    HasTerms,
+    LinksInputType,
+    OwnersInputType,
+    StructuredPropertyInputType,
+    TagsInputType,
+    TermsInputType,
+    make_time_stamp,
+    parse_time_stamp,
+)
+from datahub.sdk.dataflow import DataFlow
+from datahub.sdk.entity import Entity, ExtraAspectsType
+
+
+class DataJob(
+    HasPlatformInstance,
+    HasSubtype,
+    HasContainer,
+    HasOwnership,
+    HasInstitutionalMemory,
+    HasTags,
+    HasTerms,
+    HasDomain,
+    HasStructuredProperties,
+    Entity,
+):
+    """Represents a data job in DataHub.
+    A data job is an executable unit of a data pipeline, such as an Airflow task or a Spark job.
+    """
+
+    __slots__ = ()
+
+    @classmethod
+    def get_urn_type(cls) -> Type[DataJobUrn]:
+        """Get the URN type for data jobs."""
+        return DataJobUrn
+
+    def __init__(  # noqa: C901
+        self,
+        *,
+        name: str,
+        flow: Optional[DataFlow] = None,
+        flow_urn: Optional[DataflowUrnOrStr] = None,
+        platform_instance: Optional[str] = None,
+        display_name: Optional[str] = None,
+        description: Optional[str] = None,
+        external_url: Optional[str] = None,
+        custom_properties: Optional[Dict[str, str]] = None,
+        created: Optional[datetime] = None,
+        last_modified: Optional[datetime] = None,
+        # Standard aspects
+        subtype: Optional[str] = None,
+        owners: Optional[OwnersInputType] = None,
+        links: Optional[LinksInputType] = None,
+        tags: Optional[TagsInputType] = None,
+        terms: Optional[TermsInputType] = None,
+        domain: Optional[DomainInputType] = None,
+        inlets: Optional[List[DatasetUrnOrStr]] = None,
+        outlets: Optional[List[DatasetUrnOrStr]] = None,
+        fine_grained_lineages: Optional[List[models.FineGrainedLineageClass]] = None,
+        structured_properties: Optional[StructuredPropertyInputType] = None,
+        extra_aspects: ExtraAspectsType = None,
+    ):
+        """
+        Initialize a DataJob from either a DataFlow object or a DataFlowUrn (optionally with a platform instance).
+
+        Args:
+            name: Name of the data job (required)
+            flow: A DataFlow object (optional)
+            flow_urn: A DataFlowUrn, or its string form (optional)
+            platform_instance: Platform instance name (optional; used with flow_urn when the flow_id embeds a platform-instance prefix)
+            ... (other optional parameters)
+
+        Raises:
+            ValueError: If neither flow nor flow_urn is provided
+        """
+        if flow is None:
+            if flow_urn is None:
+                raise ValueError(
+                    "You must provide either: 1. a DataFlow object, or 2. a DataFlowUrn (and a platform_instance config if required)"
+                )
+            flow_urn = DataFlowUrn.from_string(flow_urn)
+            if platform_instance and flow_urn.flow_id.startswith(
+                f"{platform_instance}."
+            ):
+                flow_name = flow_urn.flow_id[len(platform_instance) + 1 :]
+            else:
+                flow_name = flow_urn.flow_id
+            flow = DataFlow(
+                platform=flow_urn.orchestrator,
+                name=flow_name,
+                platform_instance=platform_instance,
+            )
+        urn = DataJobUrn.create_from_ids(
+            job_id=name,
+            data_flow_urn=str(flow.urn),
+        )
+        super().__init__(urn)
+        self._set_extra_aspects(extra_aspects)
+        self._set_platform_instance(flow.urn.orchestrator, flow.platform_instance)
+        self._set_browse_path_from_flow(flow)
+
+        # Initialize DataJobInfoClass with default type
+        job_info = models.DataJobInfoClass(
+            name=display_name or name,
+            type=models.AzkabanJobTypeClass.COMMAND,  # Default type
+        )
+        self._setdefault_aspect(job_info)
+        self._ensure_datajob_props().flowUrn = str(flow.urn)
+        if description is not None:
+            self.set_description(description)
+        if external_url is not None:
+            self.set_external_url(external_url)
+        if custom_properties is not None:
+            self.set_custom_properties(custom_properties)
+        if created is not None:
+            self.set_created(created)
+        if last_modified is not None:
+            self.set_last_modified(last_modified)
+        if subtype is not None:
+            self.set_subtype(subtype)
+        if owners is not None:
+            self.set_owners(owners)
+        if links is not None:
+            self.set_links(links)
+        if tags is not None:
+            self.set_tags(tags)
+        if terms is not None:
+            self.set_terms(terms)
+        if domain is not None:
+            self.set_domain(domain)
+        if structured_properties is not None:
+            for key, value in structured_properties.items():
+                self.set_structured_property(property_urn=key, values=value)
+        if inlets is not None:
+            self.set_inlets(inlets)
+        if outlets is not None:
+            self.set_outlets(outlets)
+        if fine_grained_lineages is not None:
+            self.set_fine_grained_lineages(fine_grained_lineages)
+
+        if self.flow_urn.cluster.upper() in builder.ALL_ENV_TYPES:
+            env = self.flow_urn.cluster.upper()
+            self._ensure_datajob_props().env = env
+
+    @classmethod
+    def _new_from_graph(cls, urn: Urn, current_aspects: models.AspectBag) -> Self:
+        assert isinstance(urn, DataJobUrn)
+        # Extracting platform from the DataFlowUrn inside the DataJobUrn
+        data_flow_urn = urn.get_data_flow_urn()
+
+        entity = cls(
+            flow=DataFlow(
+                platform=data_flow_urn.orchestrator,
+                name=data_flow_urn.flow_id,
+            ),
+            name=urn.job_id,
+        )
+        return entity._init_from_graph(current_aspects)
+
+    @property
+    def urn(self) -> DataJobUrn:
+        return self._urn  # type: ignore
+
+    def _ensure_datajob_props(self) -> models.DataJobInfoClass:
+        props = self._get_aspect(models.DataJobInfoClass)
+        if props is None:
+            # Use name from URN as fallback with default type
+            props = models.DataJobInfoClass(
+                name=self.urn.job_id, type=models.AzkabanJobTypeClass.COMMAND
+            )
+            self._set_aspect(props)
+        return props
+
+    def _get_datajob_inputoutput_props(
+        self,
+    ) -> Optional[models.DataJobInputOutputClass]:
+        return self._get_aspect(models.DataJobInputOutputClass)
+
+    def _ensure_datajob_inputoutput_props(self) -> models.DataJobInputOutputClass:
+        return self._setdefault_aspect(
+            models.DataJobInputOutputClass(inputDatasets=[], outputDatasets=[])
+        )
+
+    def _get_editable_props(self) -> Optional[models.EditableDataJobPropertiesClass]:
+        return self._get_aspect(models.EditableDataJobPropertiesClass)
+
+    def _ensure_editable_props(self) -> models.EditableDataJobPropertiesClass:
+        return self._setdefault_aspect(models.EditableDataJobPropertiesClass())
+
+    @property
+    def description(self) -> Optional[str]:
+        """Get the description of the data job."""
+        editable_props = self._get_editable_props()
+        return first_non_null(
+            [
+                editable_props.description if editable_props is not None else None,
+                self._ensure_datajob_props().description,
+            ]
+        )
+
+    def set_description(self, description: str) -> None:
+        """Set the description of the data job."""
+        if is_ingestion_attribution():
+            editable_props = self._get_editable_props()
+            if editable_props is not None and editable_props.description is not None:
+                warnings.warn(
+                    "Overwriting non-ingestion description from ingestion is an anti-pattern.",
+                    category=IngestionAttributionWarning,
+                    stacklevel=2,
+                )
+                # Force the ingestion description to show up.
+                editable_props.description = None
+
+            self._ensure_datajob_props().description = description
+        else:
+            self._ensure_editable_props().description = description
+
+    @property
+    def name(self) -> str:
+        """Get the name of the data job."""
+        return self.urn.job_id
+
+    @property
+    def display_name(self) -> Optional[str]:
+        """Get the display name of the data job."""
+        return self._ensure_datajob_props().name
+
+    def set_display_name(self, display_name: str) -> None:
+        """Set the display name of the data job."""
+        self._ensure_datajob_props().name = display_name
+
+    @property
+    def external_url(self) -> Optional[str]:
+        """Get the external URL of the data job."""
+        return self._ensure_datajob_props().externalUrl
+
+    def set_external_url(self, external_url: str) -> None:
+        """Set the external URL of the data job."""
+        self._ensure_datajob_props().externalUrl = external_url
+
+    @property
+    def custom_properties(self) -> Dict[str, str]:
+        """Get the custom properties of the data job."""
+        return self._ensure_datajob_props().customProperties
+
+    def set_custom_properties(self, custom_properties: Dict[str, str]) -> None:
+        """Set the custom properties of the data job."""
+        self._ensure_datajob_props().customProperties = custom_properties
+
+    @property
+    def created(self) -> Optional[datetime]:
+        """Get the creation timestamp of the data job."""
+        return parse_time_stamp(self._ensure_datajob_props().created)
+
+    def set_created(self, created: datetime) -> None:
+        """Set the creation timestamp of the data job."""
+        self._ensure_datajob_props().created = make_time_stamp(created)
+
+    @property
+    def last_modified(self) -> Optional[datetime]:
+        """Get the last modification timestamp of the data job."""
+        return parse_time_stamp(self._ensure_datajob_props().lastModified)
+
+    def set_last_modified(self, last_modified: datetime) -> None:
+        """Set the last modification timestamp of the data job."""
+        self._ensure_datajob_props().lastModified = make_time_stamp(last_modified)
+
+    @property
+    def flow_urn(self) -> DataFlowUrn:
+        """Get the URN of the data flow associated with the data job."""
+        return self.urn.get_data_flow_urn()
+
+    def _set_browse_path_from_flow(self, flow: DataFlow) -> None:
+        flow_browse_path = flow._get_aspect(models.BrowsePathsV2Class)
+
+        # extend the flow's browse path with this job
+        browse_path = []
+        if flow_browse_path is not None:
+            for entry in flow_browse_path.path:
+                browse_path.append(
+                    models.BrowsePathEntryClass(id=entry.id, urn=entry.urn)
+                )
+        browse_path.append(models.BrowsePathEntryClass(id=flow.name, urn=str(flow.urn)))
+        # Set the browse path aspect
+        self._set_aspect(models.BrowsePathsV2Class(path=browse_path))
+
+    # TODO: support datajob input/output
+    @property
+    def inlets(self) -> List[DatasetUrn]:
+        """Get the inlets of the data job."""
+        inlets = self._ensure_datajob_inputoutput_props().inputDatasets
+        return [DatasetUrn.from_string(inlet) for inlet in inlets]
+
+    def set_inlets(self, inlets: List[DatasetUrnOrStr]) -> None:
+        """Set the inlets of the data job."""
+        for inlet in inlets:
+            inlet_urn = DatasetUrn.from_string(inlet)  # type checking
+            self._ensure_datajob_inputoutput_props().inputDatasets.append(
+                str(inlet_urn)
+            )
+
+    @property
+    def outlets(self) -> List[DatasetUrn]:
+        """Get the outlets of the data job."""
+        outlets = self._ensure_datajob_inputoutput_props().outputDatasets
+        return [DatasetUrn.from_string(outlet) for outlet in outlets]
+
+    def set_outlets(self, outlets: List[DatasetUrnOrStr]) -> None:
+        """Set the outlets of the data job."""
+        for outlet in outlets:
+            outlet_urn = DatasetUrn.from_string(outlet)  # type checking
+            self._ensure_datajob_inputoutput_props().outputDatasets.append(
+                str(outlet_urn)
+            )
+
+    @property
+    def fine_grained_lineages(self) -> List[models.FineGrainedLineageClass]:
+        io_aspect = self._get_datajob_inputoutput_props()
+        return (
+            io_aspect.fineGrainedLineages
+            if io_aspect and io_aspect.fineGrainedLineages
+            else []
+        )
+
+    def set_fine_grained_lineages(
+        self, lineages: List[models.FineGrainedLineageClass]
+    ) -> None:
+        io_aspect = self._ensure_datajob_inputoutput_props()
+        if io_aspect.fineGrainedLineages is None:
+            io_aspect.fineGrainedLineages = []
+        io_aspect.fineGrainedLineages.extend(lineages)
+
+    @property
+    def env(self) -> Optional[str]:
+        """Get the environment of the data job."""
+        return str(self._ensure_datajob_props().env)
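A companion sketch (again not part of the diff) for DataJob, exercising both construction paths supported by the __init__ above; the dataset URNs are illustrative placeholders.

from datahub.sdk.dataflow import DataFlow
from datahub.sdk.datajob import DataJob

flow = DataFlow(platform="airflow", name="daily_sales_refresh")

# Path 1: attach the job to an in-memory DataFlow object.
job = DataJob(
    name="load_fact_sales",
    flow=flow,
    inlets=["urn:li:dataset:(urn:li:dataPlatform:postgres,shop.public.orders,PROD)"],
    outlets=["urn:li:dataset:(urn:li:dataPlatform:snowflake,mart.fact_sales,PROD)"],
)
print(job.urn)     # a DataJobUrn embedding the parent flow's URN
print(job.inlets)  # round-tripped back into DatasetUrn objects

# Path 2: only the flow URN is known; __init__ rebuilds a DataFlow from it
# (pass platform_instance too if the flow_id embeds an "{instance}." prefix).
job2 = DataJob(
    name="load_dim_customer",
    flow_urn=str(flow.urn),
)

Two behaviors worth noting from the code above: set_inlets()/set_outlets() append to the existing input/output lists rather than replacing them, and the job's env is only populated when the flow URN's cluster matches a known environment in builder.ALL_ENV_TYPES.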