acryl-datahub 1.1.1rc4__py3-none-any.whl → 1.3.0.1rc9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of acryl-datahub might be problematic. Click here for more details.
- {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.3.0.1rc9.dist-info}/METADATA +2615 -2547
- {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.3.0.1rc9.dist-info}/RECORD +412 -338
- {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.3.0.1rc9.dist-info}/entry_points.txt +5 -0
- datahub/_version.py +1 -1
- datahub/api/entities/assertion/assertion.py +1 -1
- datahub/api/entities/common/serialized_value.py +1 -1
- datahub/api/entities/corpgroup/corpgroup.py +1 -1
- datahub/api/entities/dataproduct/dataproduct.py +32 -3
- datahub/api/entities/dataset/dataset.py +26 -23
- datahub/api/entities/external/__init__.py +0 -0
- datahub/api/entities/external/external_entities.py +724 -0
- datahub/api/entities/external/external_tag.py +147 -0
- datahub/api/entities/external/lake_formation_external_entites.py +162 -0
- datahub/api/entities/external/restricted_text.py +172 -0
- datahub/api/entities/external/unity_catalog_external_entites.py +172 -0
- datahub/api/entities/forms/forms.py +3 -3
- datahub/api/entities/structuredproperties/structuredproperties.py +4 -4
- datahub/api/graphql/operation.py +10 -6
- datahub/cli/check_cli.py +88 -7
- datahub/cli/cli_utils.py +63 -0
- datahub/cli/config_utils.py +18 -10
- datahub/cli/container_cli.py +5 -0
- datahub/cli/delete_cli.py +125 -27
- datahub/cli/docker_check.py +110 -14
- datahub/cli/docker_cli.py +153 -229
- datahub/cli/exists_cli.py +0 -2
- datahub/cli/get_cli.py +0 -2
- datahub/cli/graphql_cli.py +1422 -0
- datahub/cli/iceberg_cli.py +5 -0
- datahub/cli/ingest_cli.py +3 -15
- datahub/cli/migrate.py +2 -0
- datahub/cli/put_cli.py +1 -4
- datahub/cli/quickstart_versioning.py +53 -10
- datahub/cli/specific/assertions_cli.py +37 -6
- datahub/cli/specific/datacontract_cli.py +54 -7
- datahub/cli/specific/dataproduct_cli.py +2 -15
- datahub/cli/specific/dataset_cli.py +1 -8
- datahub/cli/specific/forms_cli.py +0 -4
- datahub/cli/specific/group_cli.py +0 -2
- datahub/cli/specific/structuredproperties_cli.py +1 -4
- datahub/cli/specific/user_cli.py +172 -3
- datahub/cli/state_cli.py +0 -2
- datahub/cli/timeline_cli.py +0 -2
- datahub/configuration/common.py +40 -1
- datahub/configuration/connection_resolver.py +5 -2
- datahub/configuration/env_vars.py +331 -0
- datahub/configuration/import_resolver.py +7 -4
- datahub/configuration/kafka.py +21 -1
- datahub/configuration/pydantic_migration_helpers.py +6 -13
- datahub/configuration/source_common.py +3 -2
- datahub/configuration/validate_field_deprecation.py +5 -2
- datahub/configuration/validate_field_removal.py +8 -2
- datahub/configuration/validate_field_rename.py +6 -5
- datahub/configuration/validate_multiline_string.py +5 -2
- datahub/emitter/mce_builder.py +8 -4
- datahub/emitter/rest_emitter.py +103 -30
- datahub/entrypoints.py +6 -3
- datahub/ingestion/api/auto_work_units/auto_ensure_aspect_size.py +297 -1
- datahub/ingestion/api/auto_work_units/auto_validate_input_fields.py +87 -0
- datahub/ingestion/api/decorators.py +15 -3
- datahub/ingestion/api/report.py +381 -3
- datahub/ingestion/api/sink.py +27 -2
- datahub/ingestion/api/source.py +165 -58
- datahub/ingestion/api/source_protocols.py +23 -0
- datahub/ingestion/autogenerated/__init__.py +0 -0
- datahub/ingestion/autogenerated/capability_summary.json +3652 -0
- datahub/ingestion/autogenerated/lineage.json +402 -0
- datahub/ingestion/autogenerated/lineage_helper.py +177 -0
- datahub/ingestion/extractor/schema_util.py +13 -4
- datahub/ingestion/glossary/classification_mixin.py +5 -0
- datahub/ingestion/graph/client.py +330 -25
- datahub/ingestion/graph/config.py +3 -2
- datahub/ingestion/graph/filters.py +30 -11
- datahub/ingestion/reporting/datahub_ingestion_run_summary_provider.py +21 -11
- datahub/ingestion/run/pipeline.py +81 -11
- datahub/ingestion/run/pipeline_config.py +2 -2
- datahub/ingestion/sink/datahub_kafka.py +1 -0
- datahub/ingestion/sink/datahub_rest.py +13 -5
- datahub/ingestion/sink/file.py +1 -0
- datahub/ingestion/source/abs/config.py +1 -1
- datahub/ingestion/source/abs/datalake_profiler_config.py +1 -1
- datahub/ingestion/source/abs/source.py +15 -30
- datahub/ingestion/source/aws/aws_common.py +185 -13
- datahub/ingestion/source/aws/glue.py +517 -244
- datahub/ingestion/source/aws/platform_resource_repository.py +30 -0
- datahub/ingestion/source/aws/s3_boto_utils.py +100 -5
- datahub/ingestion/source/aws/tag_entities.py +270 -0
- datahub/ingestion/source/azure/azure_common.py +3 -3
- datahub/ingestion/source/bigquery_v2/bigquery.py +67 -24
- datahub/ingestion/source/bigquery_v2/bigquery_config.py +47 -19
- datahub/ingestion/source/bigquery_v2/bigquery_connection.py +12 -1
- datahub/ingestion/source/bigquery_v2/bigquery_queries.py +3 -0
- datahub/ingestion/source/bigquery_v2/bigquery_report.py +0 -2
- datahub/ingestion/source/bigquery_v2/bigquery_schema.py +23 -16
- datahub/ingestion/source/bigquery_v2/bigquery_schema_gen.py +20 -5
- datahub/ingestion/source/bigquery_v2/common.py +1 -1
- datahub/ingestion/source/bigquery_v2/profiler.py +4 -2
- datahub/ingestion/source/bigquery_v2/queries.py +3 -3
- datahub/ingestion/source/bigquery_v2/queries_extractor.py +45 -9
- datahub/ingestion/source/cassandra/cassandra.py +6 -8
- datahub/ingestion/source/cassandra/cassandra_api.py +17 -1
- datahub/ingestion/source/cassandra/cassandra_config.py +5 -0
- datahub/ingestion/source/cassandra/cassandra_profiling.py +7 -6
- datahub/ingestion/source/cassandra/cassandra_utils.py +1 -2
- datahub/ingestion/source/common/gcp_credentials_config.py +3 -1
- datahub/ingestion/source/common/subtypes.py +53 -0
- datahub/ingestion/source/data_lake_common/data_lake_utils.py +37 -0
- datahub/ingestion/source/data_lake_common/object_store.py +115 -27
- datahub/ingestion/source/data_lake_common/path_spec.py +72 -43
- datahub/ingestion/source/datahub/config.py +12 -9
- datahub/ingestion/source/datahub/datahub_database_reader.py +26 -11
- datahub/ingestion/source/datahub/datahub_source.py +10 -0
- datahub/ingestion/source/dbt/dbt_cloud.py +16 -5
- datahub/ingestion/source/dbt/dbt_common.py +224 -9
- datahub/ingestion/source/dbt/dbt_core.py +3 -0
- datahub/ingestion/source/debug/__init__.py +0 -0
- datahub/ingestion/source/debug/datahub_debug.py +300 -0
- datahub/ingestion/source/delta_lake/config.py +9 -5
- datahub/ingestion/source/delta_lake/source.py +8 -0
- datahub/ingestion/source/dremio/dremio_api.py +114 -73
- datahub/ingestion/source/dremio/dremio_aspects.py +3 -2
- datahub/ingestion/source/dremio/dremio_config.py +5 -4
- datahub/ingestion/source/dremio/dremio_reporting.py +22 -3
- datahub/ingestion/source/dremio/dremio_source.py +132 -98
- datahub/ingestion/source/dremio/dremio_sql_queries.py +82 -21
- datahub/ingestion/source/dynamodb/dynamodb.py +11 -8
- datahub/ingestion/source/excel/__init__.py +0 -0
- datahub/ingestion/source/excel/config.py +92 -0
- datahub/ingestion/source/excel/excel_file.py +539 -0
- datahub/ingestion/source/excel/profiling.py +308 -0
- datahub/ingestion/source/excel/report.py +49 -0
- datahub/ingestion/source/excel/source.py +662 -0
- datahub/ingestion/source/excel/util.py +18 -0
- datahub/ingestion/source/feast.py +8 -10
- datahub/ingestion/source/file.py +3 -0
- datahub/ingestion/source/fivetran/config.py +66 -7
- datahub/ingestion/source/fivetran/fivetran.py +227 -43
- datahub/ingestion/source/fivetran/fivetran_log_api.py +37 -8
- datahub/ingestion/source/fivetran/fivetran_query.py +51 -29
- datahub/ingestion/source/fivetran/fivetran_rest_api.py +65 -0
- datahub/ingestion/source/fivetran/response_models.py +97 -0
- datahub/ingestion/source/gc/datahub_gc.py +0 -2
- datahub/ingestion/source/gcs/gcs_source.py +32 -4
- datahub/ingestion/source/ge_data_profiler.py +108 -31
- datahub/ingestion/source/ge_profiling_config.py +26 -11
- datahub/ingestion/source/grafana/entity_mcp_builder.py +272 -0
- datahub/ingestion/source/grafana/field_utils.py +307 -0
- datahub/ingestion/source/grafana/grafana_api.py +142 -0
- datahub/ingestion/source/grafana/grafana_config.py +104 -0
- datahub/ingestion/source/grafana/grafana_source.py +522 -84
- datahub/ingestion/source/grafana/lineage.py +202 -0
- datahub/ingestion/source/grafana/models.py +137 -0
- datahub/ingestion/source/grafana/report.py +90 -0
- datahub/ingestion/source/grafana/types.py +16 -0
- datahub/ingestion/source/hex/api.py +28 -1
- datahub/ingestion/source/hex/hex.py +16 -5
- datahub/ingestion/source/hex/mapper.py +16 -2
- datahub/ingestion/source/hex/model.py +2 -0
- datahub/ingestion/source/hex/query_fetcher.py +1 -1
- datahub/ingestion/source/iceberg/iceberg.py +123 -59
- datahub/ingestion/source/iceberg/iceberg_profiler.py +4 -2
- datahub/ingestion/source/identity/azure_ad.py +1 -1
- datahub/ingestion/source/identity/okta.py +1 -14
- datahub/ingestion/source/kafka/kafka.py +16 -0
- datahub/ingestion/source/kafka_connect/common.py +2 -2
- datahub/ingestion/source/kafka_connect/sink_connectors.py +156 -47
- datahub/ingestion/source/kafka_connect/source_connectors.py +62 -4
- datahub/ingestion/source/looker/looker_common.py +148 -79
- datahub/ingestion/source/looker/looker_config.py +15 -4
- datahub/ingestion/source/looker/looker_constant.py +4 -0
- datahub/ingestion/source/looker/looker_lib_wrapper.py +36 -3
- datahub/ingestion/source/looker/looker_liquid_tag.py +56 -5
- datahub/ingestion/source/looker/looker_source.py +503 -547
- datahub/ingestion/source/looker/looker_view_id_cache.py +1 -1
- datahub/ingestion/source/looker/lookml_concept_context.py +1 -1
- datahub/ingestion/source/looker/lookml_config.py +31 -3
- datahub/ingestion/source/looker/lookml_refinement.py +1 -1
- datahub/ingestion/source/looker/lookml_source.py +96 -117
- datahub/ingestion/source/looker/view_upstream.py +494 -1
- datahub/ingestion/source/metabase.py +32 -6
- datahub/ingestion/source/metadata/business_glossary.py +7 -7
- datahub/ingestion/source/metadata/lineage.py +9 -9
- datahub/ingestion/source/mlflow.py +12 -2
- datahub/ingestion/source/mock_data/__init__.py +0 -0
- datahub/ingestion/source/mock_data/datahub_mock_data.py +533 -0
- datahub/ingestion/source/mock_data/datahub_mock_data_report.py +12 -0
- datahub/ingestion/source/mock_data/table_naming_helper.py +97 -0
- datahub/ingestion/source/mode.py +26 -5
- datahub/ingestion/source/mongodb.py +11 -1
- datahub/ingestion/source/neo4j/neo4j_source.py +83 -144
- datahub/ingestion/source/nifi.py +2 -2
- datahub/ingestion/source/openapi.py +1 -1
- datahub/ingestion/source/powerbi/config.py +47 -21
- datahub/ingestion/source/powerbi/m_query/data_classes.py +1 -0
- datahub/ingestion/source/powerbi/m_query/parser.py +2 -2
- datahub/ingestion/source/powerbi/m_query/pattern_handler.py +100 -10
- datahub/ingestion/source/powerbi/powerbi.py +10 -6
- datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py +0 -1
- datahub/ingestion/source/powerbi_report_server/report_server.py +0 -23
- datahub/ingestion/source/powerbi_report_server/report_server_domain.py +2 -4
- datahub/ingestion/source/preset.py +3 -3
- datahub/ingestion/source/qlik_sense/data_classes.py +28 -8
- datahub/ingestion/source/qlik_sense/qlik_sense.py +2 -1
- datahub/ingestion/source/redash.py +1 -1
- datahub/ingestion/source/redshift/config.py +15 -9
- datahub/ingestion/source/redshift/datashares.py +1 -1
- datahub/ingestion/source/redshift/lineage.py +386 -687
- datahub/ingestion/source/redshift/query.py +23 -19
- datahub/ingestion/source/redshift/redshift.py +52 -111
- datahub/ingestion/source/redshift/redshift_schema.py +17 -12
- datahub/ingestion/source/redshift/report.py +0 -2
- datahub/ingestion/source/redshift/usage.py +6 -5
- datahub/ingestion/source/s3/report.py +4 -2
- datahub/ingestion/source/s3/source.py +449 -248
- datahub/ingestion/source/sac/sac.py +3 -1
- datahub/ingestion/source/salesforce.py +28 -13
- datahub/ingestion/source/schema/json_schema.py +14 -14
- datahub/ingestion/source/schema_inference/object.py +22 -6
- datahub/ingestion/source/sigma/data_classes.py +3 -0
- datahub/ingestion/source/sigma/sigma.py +7 -1
- datahub/ingestion/source/slack/slack.py +10 -16
- datahub/ingestion/source/snaplogic/__init__.py +0 -0
- datahub/ingestion/source/snaplogic/snaplogic.py +355 -0
- datahub/ingestion/source/snaplogic/snaplogic_config.py +37 -0
- datahub/ingestion/source/snaplogic/snaplogic_lineage_extractor.py +107 -0
- datahub/ingestion/source/snaplogic/snaplogic_parser.py +168 -0
- datahub/ingestion/source/snaplogic/snaplogic_utils.py +31 -0
- datahub/ingestion/source/snowflake/constants.py +3 -0
- datahub/ingestion/source/snowflake/snowflake_config.py +76 -23
- datahub/ingestion/source/snowflake/snowflake_connection.py +24 -8
- datahub/ingestion/source/snowflake/snowflake_lineage_v2.py +19 -6
- datahub/ingestion/source/snowflake/snowflake_queries.py +464 -97
- datahub/ingestion/source/snowflake/snowflake_query.py +77 -5
- datahub/ingestion/source/snowflake/snowflake_report.py +1 -2
- datahub/ingestion/source/snowflake/snowflake_schema.py +352 -16
- datahub/ingestion/source/snowflake/snowflake_schema_gen.py +51 -10
- datahub/ingestion/source/snowflake/snowflake_summary.py +7 -1
- datahub/ingestion/source/snowflake/snowflake_usage_v2.py +8 -2
- datahub/ingestion/source/snowflake/snowflake_utils.py +36 -15
- datahub/ingestion/source/snowflake/snowflake_v2.py +39 -4
- datahub/ingestion/source/snowflake/stored_proc_lineage.py +143 -0
- datahub/ingestion/source/sql/athena.py +217 -25
- datahub/ingestion/source/sql/athena_properties_extractor.py +795 -0
- datahub/ingestion/source/sql/clickhouse.py +24 -8
- datahub/ingestion/source/sql/cockroachdb.py +5 -4
- datahub/ingestion/source/sql/druid.py +2 -2
- datahub/ingestion/source/sql/hana.py +3 -1
- datahub/ingestion/source/sql/hive.py +4 -3
- datahub/ingestion/source/sql/hive_metastore.py +19 -20
- datahub/ingestion/source/sql/mariadb.py +0 -1
- datahub/ingestion/source/sql/mssql/job_models.py +3 -1
- datahub/ingestion/source/sql/mssql/source.py +336 -57
- datahub/ingestion/source/sql/mysql.py +154 -4
- datahub/ingestion/source/sql/oracle.py +5 -5
- datahub/ingestion/source/sql/postgres.py +142 -6
- datahub/ingestion/source/sql/presto.py +2 -1
- datahub/ingestion/source/sql/sql_common.py +281 -49
- datahub/ingestion/source/sql/sql_generic_profiler.py +2 -1
- datahub/ingestion/source/sql/sql_types.py +22 -0
- datahub/ingestion/source/sql/sqlalchemy_uri.py +39 -7
- datahub/ingestion/source/sql/teradata.py +1028 -245
- datahub/ingestion/source/sql/trino.py +11 -1
- datahub/ingestion/source/sql/two_tier_sql_source.py +2 -3
- datahub/ingestion/source/sql/vertica.py +14 -7
- datahub/ingestion/source/sql_queries.py +219 -121
- datahub/ingestion/source/state/checkpoint.py +8 -29
- datahub/ingestion/source/state/entity_removal_state.py +5 -2
- datahub/ingestion/source/state/redundant_run_skip_handler.py +21 -0
- datahub/ingestion/source/state/stateful_ingestion_base.py +36 -11
- datahub/ingestion/source/superset.py +314 -67
- datahub/ingestion/source/tableau/tableau.py +135 -59
- datahub/ingestion/source/tableau/tableau_common.py +9 -2
- datahub/ingestion/source/tableau/tableau_constant.py +1 -4
- datahub/ingestion/source/tableau/tableau_server_wrapper.py +3 -0
- datahub/ingestion/source/unity/config.py +160 -40
- datahub/ingestion/source/unity/connection.py +61 -0
- datahub/ingestion/source/unity/connection_test.py +1 -0
- datahub/ingestion/source/unity/platform_resource_repository.py +19 -0
- datahub/ingestion/source/unity/proxy.py +794 -51
- datahub/ingestion/source/unity/proxy_patch.py +321 -0
- datahub/ingestion/source/unity/proxy_types.py +36 -2
- datahub/ingestion/source/unity/report.py +15 -3
- datahub/ingestion/source/unity/source.py +465 -131
- datahub/ingestion/source/unity/tag_entities.py +197 -0
- datahub/ingestion/source/unity/usage.py +46 -4
- datahub/ingestion/source/usage/clickhouse_usage.py +4 -1
- datahub/ingestion/source/usage/starburst_trino_usage.py +5 -2
- datahub/ingestion/source/usage/usage_common.py +4 -3
- datahub/ingestion/source/vertexai/vertexai.py +1 -1
- datahub/ingestion/source_config/pulsar.py +3 -1
- datahub/ingestion/source_report/ingestion_stage.py +50 -11
- datahub/ingestion/transformer/add_dataset_ownership.py +18 -2
- datahub/ingestion/transformer/base_transformer.py +8 -5
- datahub/ingestion/transformer/set_browse_path.py +112 -0
- datahub/integrations/assertion/snowflake/compiler.py +4 -3
- datahub/metadata/_internal_schema_classes.py +6806 -4871
- datahub/metadata/_urns/urn_defs.py +1767 -1539
- datahub/metadata/com/linkedin/pegasus2avro/application/__init__.py +19 -0
- datahub/metadata/com/linkedin/pegasus2avro/common/__init__.py +2 -0
- datahub/metadata/com/linkedin/pegasus2avro/file/__init__.py +19 -0
- datahub/metadata/com/linkedin/pegasus2avro/identity/__init__.py +2 -0
- datahub/metadata/com/linkedin/pegasus2avro/logical/__init__.py +15 -0
- datahub/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py +6 -0
- datahub/metadata/com/linkedin/pegasus2avro/module/__init__.py +31 -0
- datahub/metadata/com/linkedin/pegasus2avro/platform/event/v1/__init__.py +4 -0
- datahub/metadata/com/linkedin/pegasus2avro/role/__init__.py +2 -0
- datahub/metadata/com/linkedin/pegasus2avro/settings/asset/__init__.py +19 -0
- datahub/metadata/com/linkedin/pegasus2avro/settings/global/__init__.py +8 -0
- datahub/metadata/com/linkedin/pegasus2avro/template/__init__.py +31 -0
- datahub/metadata/schema.avsc +18395 -16979
- datahub/metadata/schemas/Actors.avsc +38 -1
- datahub/metadata/schemas/ApplicationKey.avsc +31 -0
- datahub/metadata/schemas/ApplicationProperties.avsc +72 -0
- datahub/metadata/schemas/Applications.avsc +38 -0
- datahub/metadata/schemas/AssetSettings.avsc +63 -0
- datahub/metadata/schemas/ChartInfo.avsc +2 -1
- datahub/metadata/schemas/ChartKey.avsc +1 -0
- datahub/metadata/schemas/ContainerKey.avsc +1 -0
- datahub/metadata/schemas/ContainerProperties.avsc +8 -0
- datahub/metadata/schemas/CorpUserEditableInfo.avsc +1 -1
- datahub/metadata/schemas/CorpUserSettings.avsc +50 -0
- datahub/metadata/schemas/DashboardKey.avsc +1 -0
- datahub/metadata/schemas/DataFlowInfo.avsc +8 -0
- datahub/metadata/schemas/DataFlowKey.avsc +1 -0
- datahub/metadata/schemas/DataHubFileInfo.avsc +230 -0
- datahub/metadata/schemas/DataHubFileKey.avsc +21 -0
- datahub/metadata/schemas/DataHubPageModuleKey.avsc +21 -0
- datahub/metadata/schemas/DataHubPageModuleProperties.avsc +298 -0
- datahub/metadata/schemas/DataHubPageTemplateKey.avsc +21 -0
- datahub/metadata/schemas/DataHubPageTemplateProperties.avsc +251 -0
- datahub/metadata/schemas/DataHubPolicyInfo.avsc +12 -1
- datahub/metadata/schemas/DataJobInfo.avsc +8 -0
- datahub/metadata/schemas/DataJobInputOutput.avsc +8 -0
- datahub/metadata/schemas/DataJobKey.avsc +1 -0
- datahub/metadata/schemas/DataProcessKey.avsc +8 -0
- datahub/metadata/schemas/DataProductKey.avsc +3 -1
- datahub/metadata/schemas/DataProductProperties.avsc +1 -1
- datahub/metadata/schemas/DatasetKey.avsc +11 -1
- datahub/metadata/schemas/DatasetUsageStatistics.avsc +8 -0
- datahub/metadata/schemas/DomainKey.avsc +2 -1
- datahub/metadata/schemas/GlobalSettingsInfo.avsc +134 -0
- datahub/metadata/schemas/GlossaryNodeKey.avsc +2 -1
- datahub/metadata/schemas/GlossaryTermKey.avsc +3 -1
- datahub/metadata/schemas/IcebergWarehouseInfo.avsc +8 -0
- datahub/metadata/schemas/IncidentInfo.avsc +3 -3
- datahub/metadata/schemas/InstitutionalMemory.avsc +31 -0
- datahub/metadata/schemas/LogicalParent.avsc +145 -0
- datahub/metadata/schemas/MLFeatureKey.avsc +1 -0
- datahub/metadata/schemas/MLFeatureTableKey.avsc +1 -0
- datahub/metadata/schemas/MLModelDeploymentKey.avsc +8 -0
- datahub/metadata/schemas/MLModelGroupKey.avsc +11 -1
- datahub/metadata/schemas/MLModelKey.avsc +9 -0
- datahub/metadata/schemas/MLPrimaryKeyKey.avsc +1 -0
- datahub/metadata/schemas/MetadataChangeEvent.avsc +151 -47
- datahub/metadata/schemas/MetadataChangeLog.avsc +62 -44
- datahub/metadata/schemas/MetadataChangeProposal.avsc +61 -0
- datahub/metadata/schemas/NotebookKey.avsc +1 -0
- datahub/metadata/schemas/Operation.avsc +4 -2
- datahub/metadata/schemas/Ownership.avsc +69 -0
- datahub/metadata/schemas/QuerySubjects.avsc +1 -12
- datahub/metadata/schemas/RelationshipChangeEvent.avsc +215 -0
- datahub/metadata/schemas/SchemaFieldKey.avsc +4 -1
- datahub/metadata/schemas/StructuredProperties.avsc +69 -0
- datahub/metadata/schemas/StructuredPropertySettings.avsc +9 -0
- datahub/metadata/schemas/SystemMetadata.avsc +61 -0
- datahub/metadata/schemas/UpstreamLineage.avsc +9 -0
- datahub/sdk/__init__.py +2 -0
- datahub/sdk/_all_entities.py +7 -0
- datahub/sdk/_shared.py +249 -5
- datahub/sdk/chart.py +386 -0
- datahub/sdk/container.py +7 -0
- datahub/sdk/dashboard.py +453 -0
- datahub/sdk/dataflow.py +7 -0
- datahub/sdk/datajob.py +45 -13
- datahub/sdk/dataset.py +56 -2
- datahub/sdk/entity_client.py +111 -9
- datahub/sdk/lineage_client.py +663 -82
- datahub/sdk/main_client.py +50 -16
- datahub/sdk/mlmodel.py +120 -38
- datahub/sdk/mlmodelgroup.py +7 -0
- datahub/sdk/search_client.py +7 -3
- datahub/sdk/search_filters.py +304 -36
- datahub/secret/datahub_secret_store.py +3 -0
- datahub/secret/environment_secret_store.py +29 -0
- datahub/secret/file_secret_store.py +49 -0
- datahub/specific/aspect_helpers/fine_grained_lineage.py +76 -0
- datahub/specific/aspect_helpers/siblings.py +73 -0
- datahub/specific/aspect_helpers/structured_properties.py +27 -0
- datahub/specific/chart.py +1 -1
- datahub/specific/datajob.py +15 -1
- datahub/specific/dataproduct.py +4 -0
- datahub/specific/dataset.py +39 -59
- datahub/sql_parsing/split_statements.py +13 -0
- datahub/sql_parsing/sql_parsing_aggregator.py +70 -26
- datahub/sql_parsing/sqlglot_lineage.py +196 -42
- datahub/sql_parsing/sqlglot_utils.py +12 -4
- datahub/sql_parsing/tool_meta_extractor.py +1 -3
- datahub/telemetry/telemetry.py +28 -14
- datahub/testing/sdk_v2_helpers.py +7 -1
- datahub/upgrade/upgrade.py +73 -17
- datahub/utilities/file_backed_collections.py +8 -9
- datahub/utilities/is_pytest.py +3 -2
- datahub/utilities/logging_manager.py +22 -6
- datahub/utilities/mapping.py +29 -2
- datahub/utilities/sample_data.py +5 -4
- datahub/utilities/server_config_util.py +10 -1
- datahub/utilities/sqlalchemy_query_combiner.py +5 -2
- datahub/utilities/stats_collections.py +4 -0
- datahub/utilities/urns/urn.py +41 -2
- datahub/emitter/sql_parsing_builder.py +0 -306
- datahub/ingestion/source/redshift/lineage_v2.py +0 -466
- {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.3.0.1rc9.dist-info}/WHEEL +0 -0
- {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.3.0.1rc9.dist-info}/licenses/LICENSE +0 -0
- {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.3.0.1rc9.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,662 @@
|
|
|
1
|
+
import glob
|
|
2
|
+
import io
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
import re
|
|
6
|
+
from datetime import datetime, timezone
|
|
7
|
+
from enum import Enum, auto
|
|
8
|
+
from io import BytesIO
|
|
9
|
+
from pathlib import PurePath
|
|
10
|
+
from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple, Type, Union
|
|
11
|
+
from urllib.parse import urlparse
|
|
12
|
+
|
|
13
|
+
from datahub.emitter.mce_builder import make_dataset_urn_with_platform_instance
|
|
14
|
+
from datahub.emitter.mcp import MetadataChangeProposalWrapper
|
|
15
|
+
from datahub.ingestion.api.common import PipelineContext
|
|
16
|
+
from datahub.ingestion.api.decorators import (
|
|
17
|
+
SupportStatus,
|
|
18
|
+
capability,
|
|
19
|
+
config_class,
|
|
20
|
+
platform_name,
|
|
21
|
+
support_status,
|
|
22
|
+
)
|
|
23
|
+
from datahub.ingestion.api.source import MetadataWorkUnitProcessor, SourceCapability
|
|
24
|
+
from datahub.ingestion.api.workunit import MetadataWorkUnit
|
|
25
|
+
from datahub.ingestion.source.aws.s3_boto_utils import get_s3_tags
|
|
26
|
+
from datahub.ingestion.source.aws.s3_util import (
|
|
27
|
+
get_bucket_name,
|
|
28
|
+
get_bucket_relative_path,
|
|
29
|
+
strip_s3_prefix,
|
|
30
|
+
)
|
|
31
|
+
from datahub.ingestion.source.azure.abs_folder_utils import (
|
|
32
|
+
get_abs_tags,
|
|
33
|
+
)
|
|
34
|
+
from datahub.ingestion.source.azure.abs_utils import (
|
|
35
|
+
get_container_relative_path,
|
|
36
|
+
strip_abs_prefix,
|
|
37
|
+
)
|
|
38
|
+
from datahub.ingestion.source.data_lake_common.data_lake_utils import ContainerWUCreator
|
|
39
|
+
from datahub.ingestion.source.excel.config import ExcelSourceConfig
|
|
40
|
+
from datahub.ingestion.source.excel.excel_file import ExcelFile, ExcelTable
|
|
41
|
+
from datahub.ingestion.source.excel.profiling import ExcelProfiler
|
|
42
|
+
from datahub.ingestion.source.excel.report import ExcelSourceReport
|
|
43
|
+
from datahub.ingestion.source.excel.util import gen_dataset_name
|
|
44
|
+
from datahub.ingestion.source.s3.source import BrowsePath
|
|
45
|
+
from datahub.ingestion.source.state.stale_entity_removal_handler import (
|
|
46
|
+
StaleEntityRemovalHandler,
|
|
47
|
+
)
|
|
48
|
+
from datahub.ingestion.source.state.stateful_ingestion_base import (
|
|
49
|
+
StatefulIngestionSourceBase,
|
|
50
|
+
)
|
|
51
|
+
from datahub.metadata.com.linkedin.pegasus2avro.common import TimeStamp
|
|
52
|
+
from datahub.metadata.schema_classes import (
|
|
53
|
+
BooleanTypeClass,
|
|
54
|
+
ChangeTypeClass,
|
|
55
|
+
DatasetPropertiesClass,
|
|
56
|
+
DateTypeClass,
|
|
57
|
+
GlobalTagsClass,
|
|
58
|
+
NullTypeClass,
|
|
59
|
+
NumberTypeClass,
|
|
60
|
+
OtherSchemaClass,
|
|
61
|
+
RecordTypeClass,
|
|
62
|
+
SchemaFieldClass as SchemaField,
|
|
63
|
+
SchemaFieldDataTypeClass as SchemaFieldDataType,
|
|
64
|
+
SchemaMetadataClass as SchemaMetadata,
|
|
65
|
+
StringTypeClass,
|
|
66
|
+
)
|
|
67
|
+
from datahub.utilities.perf_timer import PerfTimer
|
|
68
|
+
|
|
69
|
+
logger: logging.Logger = logging.getLogger(__name__)
|
|
70
|
+
|
|
71
|
+
# Dtype-name groups that share a DataHub field type. Grouping avoids repeating
# the same value class for every pandas/numpy spelling of a numeric or
# date-like dtype; merge order below reproduces the original key order.
_NUMBER_DTYPES = (
    "int8", "int16", "int32", "int64",
    "uint8", "uint16", "uint32", "uint64",
    "Int8", "Int16", "Int32", "Int64",
    "UInt8", "UInt16", "UInt32", "UInt64",
    "intp", "uintp",
    "float16", "float32", "float64", "float128",
    "Float32", "Float64",
    "complex64", "complex128", "complex256",
)
_DATE_DTYPES = (
    "datetime64", "datetime64[ns]", "datetime64[ns, tz]",
    "timedelta64", "timedelta64[ns]",
    "period", "period[D]", "period[M]", "period[Y]",
)

# Maps a dtype name (as reported for a column) to the DataHub schema field
# type class used when emitting schema metadata.
field_type_mapping: Dict[str, Type] = {
    **{dtype: NumberTypeClass for dtype in _NUMBER_DTYPES},
    "bool": BooleanTypeClass,
    "boolean": BooleanTypeClass,
    "object": StringTypeClass,
    "string": StringTypeClass,
    **{dtype: DateTypeClass for dtype in _DATE_DTYPES},
    "category": RecordTypeClass,
    "interval": RecordTypeClass,
    "sparse": RecordTypeClass,
    "NA": NullTypeClass,
}
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
# File extensions accepted as Excel workbooks; matched case-insensitively
# against a path's extension (see is_excel_file).
ALLOWED_EXTENSIONS = [".xlsx", ".xlsm", ".xltx", ".xltm"]
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
class UriType(Enum):
    """Classification of a path spec, as returned by ``ExcelSource.uri_type``.

    Values are ``auto()``-assigned and only compared by identity.
    """

    HTTP = auto()
    HTTPS = auto()
    LOCAL_FILE = auto()  # "file:///..." URI
    ABSOLUTE_PATH = auto()  # bare absolute filesystem path (no scheme)
    RELATIVE_PATH = auto()  # bare relative filesystem path (no scheme)
    S3 = auto()
    S3A = auto()
    ABS = auto()  # Azure Blob Storage (host under .blob.core.windows.net)
    UNKNOWN = auto()
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
@platform_name("Excel")
|
|
135
|
+
@config_class(ExcelSourceConfig)
|
|
136
|
+
@support_status(SupportStatus.INCUBATING)
|
|
137
|
+
@capability(SourceCapability.CONTAINERS, "Enabled by default")
|
|
138
|
+
@capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
|
|
139
|
+
@capability(SourceCapability.SCHEMA_METADATA, "Enabled by default")
|
|
140
|
+
@capability(
|
|
141
|
+
SourceCapability.DELETION_DETECTION,
|
|
142
|
+
"Optionally enabled via `stateful_ingestion.remove_stale_metadata`",
|
|
143
|
+
supported=True,
|
|
144
|
+
)
|
|
145
|
+
class ExcelSource(StatefulIngestionSourceBase):
|
|
146
|
+
config: ExcelSourceConfig
|
|
147
|
+
report: ExcelSourceReport
|
|
148
|
+
container_WU_creator: ContainerWUCreator
|
|
149
|
+
platform: str = "excel"
|
|
150
|
+
|
|
151
|
+
def __init__(self, ctx: PipelineContext, config: ExcelSourceConfig):
|
|
152
|
+
super().__init__(config, ctx)
|
|
153
|
+
self.ctx = ctx
|
|
154
|
+
self.config = config
|
|
155
|
+
self.report: ExcelSourceReport = ExcelSourceReport()
|
|
156
|
+
|
|
157
|
+
@classmethod
|
|
158
|
+
def create(cls, config_dict: dict, ctx: PipelineContext) -> "ExcelSource":
|
|
159
|
+
config = ExcelSourceConfig.parse_obj(config_dict)
|
|
160
|
+
return cls(ctx, config)
|
|
161
|
+
|
|
162
|
+
def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]:
|
|
163
|
+
return [
|
|
164
|
+
*super().get_workunit_processors(),
|
|
165
|
+
StaleEntityRemovalHandler.create(
|
|
166
|
+
self, self.config, self.ctx
|
|
167
|
+
).workunit_processor,
|
|
168
|
+
]
|
|
169
|
+
|
|
170
|
+
    @staticmethod
    def uri_type(uri: str) -> Tuple[UriType, str]:
        """Classify *uri* and return ``(UriType, remainder)``.

        The remainder is the URI with its scheme prefix sliced off by a fixed
        character count (e.g. 7 for ``http://``, 8 for ``https://``).

        NOTE(review): the generic branch slices ``len(scheme) + 3`` characters,
        which assumes the scheme is followed by ``://``; for schemes without
        ``//`` (e.g. ``mailto:``) the remainder is off by two — confirm callers
        only pass ``://``-style URIs.
        """
        # Defensive: non-string or empty input is unclassifiable.
        if not uri or not isinstance(uri, str):
            return UriType.UNKNOWN, ""

        uri = uri.strip()
        parsed = urlparse(uri)
        scheme = parsed.scheme.lower()

        if scheme == "http":
            return UriType.HTTP, uri[7:]  # strip "http://"
        elif scheme == "https":
            # Azure Blob Storage is served over plain https; detect it by host.
            if parsed.netloc and ".blob.core.windows.net" in parsed.netloc:
                return UriType.ABS, uri[8:]  # strip "https://"
            else:
                return UriType.HTTPS, uri[8:]
        elif scheme == "file":
            if uri.startswith("file:///"):
                return UriType.LOCAL_FILE, uri[7:]  # keep leading "/" of the path
            # "file://host/..." (non-empty authority) deliberately falls
            # through to the generic `if scheme:` branch below → UNKNOWN.

        if scheme == "s3":
            return UriType.S3, uri[5:]  # strip "s3://"
        elif scheme == "s3a":
            return UriType.S3A, uri[6:]  # strip "s3a://"

        # Any other scheme: unknown type; strip "<scheme>://" (see NOTE above).
        if scheme:
            return UriType.UNKNOWN, uri[len(scheme) + 3 :]

        # No scheme at all: plain filesystem path.
        if os.path.isabs(uri):
            return UriType.ABSOLUTE_PATH, uri
        else:
            return UriType.RELATIVE_PATH, uri
|
|
202
|
+
|
|
203
|
+
@staticmethod
|
|
204
|
+
def is_excel_file(path: str) -> bool:
|
|
205
|
+
_, ext = os.path.splitext(path)
|
|
206
|
+
return ext.lower() in ALLOWED_EXTENSIONS
|
|
207
|
+
|
|
208
|
+
@staticmethod
|
|
209
|
+
def local_browser(path_spec: str) -> Iterable[BrowsePath]:
|
|
210
|
+
matching_paths = glob.glob(path_spec, recursive=True)
|
|
211
|
+
matching_files = [path for path in matching_paths if os.path.isfile(path)]
|
|
212
|
+
|
|
213
|
+
for file in sorted(matching_files):
|
|
214
|
+
full_path = PurePath(os.path.normpath(file)).as_posix()
|
|
215
|
+
yield BrowsePath(
|
|
216
|
+
file=full_path,
|
|
217
|
+
timestamp=datetime.fromtimestamp(
|
|
218
|
+
os.path.getmtime(full_path), timezone.utc
|
|
219
|
+
),
|
|
220
|
+
size=os.path.getsize(full_path),
|
|
221
|
+
partitions=[],
|
|
222
|
+
)
|
|
223
|
+
|
|
224
|
+
def get_local_file(self, file_path: str) -> Union[BytesIO, None]:
    """Read a local file fully into an in-memory stream.

    On any failure the file is recorded as dropped in the report and
    None is returned instead of raising.
    """
    try:
        with open(file_path, "rb") as handle:
            stream = io.BytesIO(handle.read())
        stream.seek(0)
        return stream
    except Exception as e:
        self.report.report_file_dropped(file_path)
        self.report.warning(
            message="Error reading local Excel file",
            context=f"Path={file_path}",
            exc=e,
        )
        return None
@staticmethod
|
|
240
|
+
def get_prefix(relative_path: str) -> str:
|
|
241
|
+
index = re.search(r"[*|{]", relative_path)
|
|
242
|
+
if index:
|
|
243
|
+
return relative_path[: index.start()]
|
|
244
|
+
else:
|
|
245
|
+
return relative_path
|
|
246
|
+
|
|
247
|
+
@staticmethod
|
|
248
|
+
def create_s3_path(bucket_name: str, key: str) -> str:
|
|
249
|
+
return f"s3://{bucket_name}/{key}"
|
|
250
|
+
|
|
251
|
+
def create_abs_path(self, key: str) -> str:
    """Build the https URL of a blob in the configured Azure container.

    Returns an empty string when no azure_config is present.
    """
    azure = self.config.azure_config
    if not azure:
        return ""
    return (
        f"https://{azure.account_name}.blob.core.windows.net/"
        f"{azure.container_name}/{key}"
    )
@staticmethod
|
|
261
|
+
def strip_file_prefix(path: str) -> str:
|
|
262
|
+
if path.startswith("/"):
|
|
263
|
+
return path[1:]
|
|
264
|
+
else:
|
|
265
|
+
return path
|
|
266
|
+
|
|
267
|
+
def s3_browser(self, path_spec: str) -> Iterable[BrowsePath]:
    """Yield a BrowsePath for each S3 object under the literal prefix of *path_spec*.

    Raises ValueError when no aws_config is set.
    """
    if self.config.aws_config is None:
        raise ValueError("aws_config not set. Cannot browse s3")
    s3_resource = self.config.aws_config.get_s3_resource(self.config.verify_ssl)

    bucket_name = get_bucket_name(path_spec)
    logger.debug(f"Scanning bucket: {bucket_name}")

    # Only the part of the spec before any wildcard can be used as an S3 prefix.
    prefix = self.get_prefix(get_bucket_relative_path(path_spec))
    logger.debug(f"Scanning objects with prefix:{prefix}")

    listing = s3_resource.Bucket(bucket_name).objects.filter(Prefix=prefix)
    for s3_object in listing.page_size(1000):
        object_uri = self.create_s3_path(s3_object.bucket_name, s3_object.key)
        logger.debug(f"Path: {object_uri}")
        yield BrowsePath(
            file=object_uri,
            timestamp=s3_object.last_modified,
            size=s3_object.size,
            partitions=[],
            content_type=None,
        )
def get_s3_file(self, path_spec: str) -> Union[BytesIO, None]:
    """Download one S3 object into an in-memory stream.

    Returns None (and records the file as dropped) on any read failure.
    Raises ValueError when no aws_config is set.
    """
    if self.config.aws_config is None:
        raise ValueError("aws_config not set. Cannot browse s3")
    s3_resource = self.config.aws_config.get_s3_resource(self.config.verify_ssl)

    bucket_name = get_bucket_name(path_spec)
    key = get_bucket_relative_path(path_spec)
    logger.debug(f"Getting file: {key} from bucket: {bucket_name}")

    try:
        body = s3_resource.Object(bucket_name, key).get()["Body"].read()
        stream = io.BytesIO(body)
        stream.seek(0)
        return stream
    except Exception as e:
        self.report.report_file_dropped(path_spec)
        self.report.warning(
            message="Error reading Excel file from S3",
            context=f"Path={path_spec}",
            exc=e,
        )
        return None
def process_s3_tags(
    self, path_spec: str, dataset_urn: str
) -> Iterable[MetadataWorkUnit]:
    """Fetch S3 bucket/object tags for *path_spec* and attach them to the dataset."""
    s3_tags = get_s3_tags(
        get_bucket_name(path_spec),
        get_bucket_relative_path(path_spec),
        dataset_urn,
        self.config.aws_config,
        self.ctx,
        self.config.use_s3_bucket_tags,
        self.config.use_s3_object_tags,
        self.config.verify_ssl,
    )

    if s3_tags:
        yield from self.process_global_tags(s3_tags, dataset_urn)
def abs_browser(self, path_spec: str) -> Iterable[BrowsePath]:
    """Yield a BrowsePath for each blob under the literal prefix of *path_spec*.

    Raises ValueError when no azure_config is set.
    """
    if self.config.azure_config is None:
        raise ValueError("azure_config not set. Cannot browse Azure Blob Storage")

    container_client = (
        self.config.azure_config.get_blob_service_client().get_container_client(
            self.config.azure_config.container_name
        )
    )
    logger.debug(f"Scanning container: {self.config.azure_config.container_name}")

    # Only the part of the spec before any wildcard can be used as a blob prefix.
    prefix = self.get_prefix(get_container_relative_path(path_spec))
    logger.debug(f"Scanning objects with prefix: {prefix}")

    for blob in container_client.list_blobs(
        name_starts_with=f"{prefix}", results_per_page=1000
    ):
        blob_uri = self.create_abs_path(blob.name)
        logger.debug(f"Path: {blob_uri}")
        yield BrowsePath(
            file=blob_uri,
            timestamp=blob.last_modified,
            size=blob.size,
            partitions=[],
            content_type=None,
        )
def get_abs_file(self, path_spec: str) -> Union[BytesIO, None]:
    """Download one Azure blob into an in-memory stream.

    Returns None (and records the file as dropped) on any read failure.
    Raises ValueError when no azure_config is set.
    """
    if self.config.azure_config is None:
        raise ValueError("azure_config not set. Cannot browse Azure Blob Storage")

    service_client = self.config.azure_config.get_blob_service_client()
    container_client = service_client.get_container_client(
        self.config.azure_config.container_name
    )

    blob_path = get_container_relative_path(path_spec)
    logger.debug(
        f"Getting file: {blob_path} from container: {self.config.azure_config.container_name}"
    )

    try:
        content = (
            container_client.get_blob_client(blob_path).download_blob().readall()
        )
        stream = io.BytesIO(content)
        stream.seek(0)
        return stream
    except Exception as e:
        self.report.report_file_dropped(path_spec)
        self.report.warning(
            message="Error reading Excel file from Azure Blob Storage",
            context=f"Path={path_spec}",
            exc=e,
        )
        return None
def process_abs_tags(
    self, path_spec: str, dataset_urn: str
) -> Iterable[MetadataWorkUnit]:
    """Fetch Azure blob tags for *path_spec* and attach them to the dataset.

    Silently yields nothing when azure_config or its container_name is missing.
    """
    azure = self.config.azure_config
    if not azure or azure.container_name is None:
        return

    abs_tags = get_abs_tags(
        azure.container_name,
        get_container_relative_path(path_spec),
        dataset_urn,
        azure,
        self.ctx,
        self.config.use_abs_blob_tags,
    )

    if abs_tags:
        yield from self.process_global_tags(abs_tags, dataset_urn)
@staticmethod
def get_field_type(field_type: str) -> SchemaFieldDataType:
    """Map a native dtype name to a DataHub SchemaFieldDataType
    (NullTypeClass for anything unmapped)."""
    mapped_class = field_type_mapping.get(field_type, NullTypeClass)
    return SchemaFieldDataType(type=mapped_class())
def construct_schema_field(self, f_name: str, f_type: str) -> SchemaField:
    """Build a SchemaField for one column from its name and native dtype."""
    logger.debug(f"Field: {f_name} Type: {f_type}")
    field_kwargs = dict(
        fieldPath=f_name,
        type=self.get_field_type(f_type),
        nativeDataType=f_type,
        nullable=False,
        description=None,
        recursive=False,
    )
    return SchemaField(**field_kwargs)
def construct_schema_metadata(
    self,
    name: str,
    dataset: ExcelTable,
) -> SchemaMetadata:
    """Derive SchemaMetadata for a worksheet from its DataFrame's dtypes.

    Column order follows the DataFrame; dtypes are rendered as strings
    for readability in the native type field.
    """
    # One schema field per column, numpy dtypes stringified.
    canonical_schema: List[SchemaField] = [
        self.construct_schema_field(column, str(dtype))
        for column, dtype in dataset.df.dtypes.to_dict().items()
    ]

    return SchemaMetadata(
        schemaName=name,
        platform=f"urn:li:dataPlatform:{self.platform}",
        version=0,
        hash="",
        platformSchema=OtherSchemaClass(rawSchema=""),
        fields=canonical_schema,
    )
@staticmethod
|
|
451
|
+
def get_dataset_attributes(metadata: Dict[str, Any]) -> dict:
|
|
452
|
+
result = {}
|
|
453
|
+
for key, value in metadata.items():
|
|
454
|
+
result[key] = str(value)
|
|
455
|
+
return result
|
|
456
|
+
|
|
457
|
+
@staticmethod
def process_global_tags(
    global_tags: GlobalTagsClass, dataset_urn: str
) -> Iterable[MetadataWorkUnit]:
    """Emit a single UPSERT work unit attaching *global_tags* to *dataset_urn*."""
    proposal = MetadataChangeProposalWrapper(
        entityType="dataset",
        entityUrn=dataset_urn,
        changeType=ChangeTypeClass.UPSERT,
        aspect=global_tags,
    )
    yield proposal.as_workunit()
def process_dataset(
    self,
    relative_path: str,
    full_path: str,
    filename: str,
    table: ExcelTable,
    source_type: UriType,
) -> Iterable[MetadataWorkUnit]:
    """Emit all metadata work units for one worksheet.

    Produces, in order: schema metadata, dataset properties, container
    hierarchy, storage-level tags (S3/ABS only, when enabled), and
    optional profiling work units.
    """
    self.report.report_worksheet_processed()
    dataset_name = gen_dataset_name(
        relative_path, table.sheet_name, self.config.convert_urns_to_lowercase
    )
    dataset_urn = make_dataset_urn_with_platform_instance(
        platform=self.platform,
        name=dataset_name,
        platform_instance=self.config.platform_instance,
        env=self.config.env,
    )

    # Workbook metadata becomes custom properties; "created"/"modified"
    # (when present) are converted to epoch-millisecond timestamps.
    attributes = self.get_dataset_attributes(table.metadata)
    created: Optional[datetime] = table.metadata.get("created")
    modified: Optional[datetime] = table.metadata.get("modified")
    dataset_properties = DatasetPropertiesClass(
        tags=[],
        customProperties=attributes,
        created=(
            TimeStamp(time=int(created.timestamp() * 1000)) if created else None
        ),
        lastModified=(
            TimeStamp(time=int(modified.timestamp() * 1000)) if modified else None
        ),
    )

    schema_metadata = self.construct_schema_metadata(
        name=dataset_name,
        dataset=table,
    )

    yield MetadataChangeProposalWrapper(
        entityUrn=dataset_urn,
        aspect=schema_metadata,
    ).as_workunit()

    yield MetadataChangeProposalWrapper(
        entityUrn=dataset_urn,
        aspect=dataset_properties,
    ).as_workunit()

    yield from self.container_WU_creator.create_container_hierarchy(
        relative_path, dataset_urn
    )

    # Storage tags only apply to the matching backend and when configured.
    if source_type == UriType.S3 and (
        self.config.use_s3_bucket_tags or self.config.use_s3_object_tags
    ):
        yield from self.process_s3_tags(full_path, dataset_urn)
    elif source_type == UriType.ABS and self.config.use_abs_blob_tags:
        yield from self.process_abs_tags(full_path, dataset_urn)

    if self.config.is_profiling_enabled():
        profiler = ExcelProfiler(
            self.config,
            self.report,
            table.df,
            filename,
            table.sheet_name,
            dataset_urn,
            relative_path,
        )
        yield from profiler.get_workunits()
def process_file(
    self,
    file_content: BytesIO,
    relative_path: str,
    full_path: str,
    filename: str,
    source_type: UriType,
) -> Iterable[MetadataWorkUnit]:
    """Load one Excel workbook and process every worksheet allowed by config."""
    self.report.report_file_processed()
    workbook = ExcelFile(filename, file_content, self.report)
    if not workbook.load_workbook():
        return

    for table in workbook.get_tables(active_only=self.config.active_sheet_only):
        self.report.report_worksheet_scanned()
        dataset_name = gen_dataset_name(
            relative_path,
            table.sheet_name,
            self.config.convert_urns_to_lowercase,
        )
        # Skip worksheets excluded by the configured allow/deny pattern.
        if not self.config.worksheet_pattern.allowed(dataset_name):
            self.report.report_dropped(dataset_name)
            continue
        yield from self.process_dataset(
            relative_path, full_path, filename, table, source_type
        )
def check_file_is_valid(self, filename: str) -> bool:
    """Return True when *filename* passes the path pattern and has an Excel
    extension; record dropped/skipped files in the report.

    Fix: the debug message previously logged a garbled literal instead of
    interpolating the file name; it now includes ``filename``.
    """
    self.report.report_file_scanned()
    if not self.config.path_pattern.allowed(filename):
        self.report.report_dropped(filename)
        return False
    elif not self.is_excel_file(filename):
        # Matched the path pattern but does not carry an Excel extension.
        logger.debug(f"File is not an Excel workbook: {filename}")
        return False
    return True
def retrieve_file_data(
    self, uri_type: UriType, path: str, path_spec: str
) -> Iterator[Tuple[BytesIO, str, str, str]]:
    """Browse the backend selected by *uri_type* and yield, for every valid
    Excel file found, a ``(file_data, relative_path, full_path, filename)``
    tuple (filename has its extension removed).

    Unsupported URI types are reported as dropped with a warning.

    Fix: the local-path branch previously logged a garbled literal
    (``"Processing (unknown)"``); it now logs the browsed file path,
    consistent with the S3 and ABS branches.
    """
    if uri_type in (
        UriType.LOCAL_FILE,
        UriType.ABSOLUTE_PATH,
        UriType.RELATIVE_PATH,
    ):
        logger.debug(f"Searching local path: {path}")
        for browse_path in self.local_browser(path):
            if self.check_file_is_valid(browse_path.file):
                basename = os.path.basename(browse_path.file)
                file_path = self.strip_file_prefix(browse_path.file)
                filename = os.path.splitext(basename)[0]

                logger.debug(f"Processing {browse_path.file}")
                with self.report.local_file_get_timer:
                    file_data = self.get_local_file(browse_path.file)

                if file_data is not None:
                    yield file_data, file_path, browse_path.file, filename

    elif uri_type == UriType.S3:
        logger.debug(f"Searching S3 path: {path}")
        for browse_path in self.s3_browser(path_spec):
            if self.check_file_is_valid(browse_path.file):
                uri_path = strip_s3_prefix(browse_path.file)
                basename = os.path.basename(uri_path)
                filename = os.path.splitext(basename)[0]

                logger.debug(f"Processing {browse_path.file}")
                with self.report.s3_file_get_timer:
                    file_data = self.get_s3_file(browse_path.file)

                if file_data is not None:
                    yield file_data, uri_path, browse_path.file, filename

    elif uri_type == UriType.ABS:
        logger.debug(f"Searching Azure Blob Storage path: {path}")
        for browse_path in self.abs_browser(path_spec):
            if self.check_file_is_valid(browse_path.file):
                uri_path = strip_abs_prefix(browse_path.file)
                basename = os.path.basename(uri_path)
                filename = os.path.splitext(basename)[0]

                logger.debug(f"Processing {browse_path.file}")
                with self.report.abs_file_get_timer:
                    file_data = self.get_abs_file(browse_path.file)

                if file_data is not None:
                    yield file_data, uri_path, browse_path.file, filename

    else:
        self.report.report_file_dropped(path_spec)
        self.report.warning(
            message="Unsupported URI Type",
            context=f"Type={uri_type.name},URI={path_spec}",
        )
def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]:
    """Walk every configured path spec and emit work units for all Excel
    files found, logging total elapsed time at the end."""
    self.container_WU_creator = ContainerWUCreator(
        self.platform,
        self.config.platform_instance,
        self.config.env,
    )

    with PerfTimer() as timer:
        for path_spec in self.config.path_list:
            logger.debug(f"Processing path: {path_spec}")
            uri_type, path = self.uri_type(path_spec)
            logger.debug(f"URI Type: {uri_type} Path: {path}")

            file_records = self.retrieve_file_data(uri_type, path, path_spec)
            for file_data, relative_path, full_path, filename in file_records:
                yield from self.process_file(
                    file_data, relative_path, full_path, filename, uri_type
                )

        time_taken = timer.elapsed_seconds()
        logger.info(f"Finished ingestion in {time_taken:.3f} seconds")
def get_report(self):
    """Return the ingestion report accumulated by this source."""
    return self.report
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import os
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def gen_dataset_name(path: str, sheet_name: str, lower_case: bool) -> str:
    """Build a dataset name of the form ``dir/[workbook]sheet``.

    Fix: both format strings were garbled placeholders (``f"[(unknown)]"``);
    restored to bracket the workbook file name, i.e. ``f"[{filename}]"``.

    :param path: workbook path (relative or absolute); split into directory
        and file name with ``os.path.split``.
    :param sheet_name: worksheet name; surrounding whitespace is stripped.
    :param lower_case: lowercase the final name when True.
    :return: the composed dataset name.
    """
    sheet_name = sheet_name.strip()
    directory, filename = os.path.split(path)

    # The workbook name is bracketed so it stays visually distinct from both
    # the directory prefix and the trailing sheet name.
    bracketed = f"[{filename}]"
    excel_path = bracketed if not directory else os.path.join(directory, bracketed)

    name = f"{excel_path}{sheet_name}"
    return name.lower() if lower_case else name