acryl-datahub 1.1.1rc4__py3-none-any.whl → 1.2.0.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

Files changed (223)
  1. {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.2.0.1.dist-info}/METADATA +2511 -2484
  2. {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.2.0.1.dist-info}/RECORD +223 -189
  3. {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.2.0.1.dist-info}/entry_points.txt +2 -0
  4. datahub/_version.py +1 -1
  5. datahub/api/entities/dataset/dataset.py +1 -1
  6. datahub/api/entities/external/__init__.py +0 -0
  7. datahub/api/entities/external/external_entities.py +239 -0
  8. datahub/api/entities/external/external_tag.py +145 -0
  9. datahub/api/entities/external/lake_formation_external_entites.py +161 -0
  10. datahub/api/entities/external/restricted_text.py +247 -0
  11. datahub/api/entities/external/unity_catalog_external_entites.py +173 -0
  12. datahub/cli/check_cli.py +88 -7
  13. datahub/cli/cli_utils.py +63 -0
  14. datahub/cli/container_cli.py +5 -0
  15. datahub/cli/delete_cli.py +124 -27
  16. datahub/cli/docker_check.py +107 -12
  17. datahub/cli/docker_cli.py +149 -227
  18. datahub/cli/exists_cli.py +0 -2
  19. datahub/cli/get_cli.py +0 -2
  20. datahub/cli/iceberg_cli.py +5 -0
  21. datahub/cli/ingest_cli.py +3 -15
  22. datahub/cli/migrate.py +2 -0
  23. datahub/cli/put_cli.py +1 -4
  24. datahub/cli/quickstart_versioning.py +50 -7
  25. datahub/cli/specific/assertions_cli.py +0 -4
  26. datahub/cli/specific/datacontract_cli.py +0 -3
  27. datahub/cli/specific/dataproduct_cli.py +0 -11
  28. datahub/cli/specific/dataset_cli.py +1 -8
  29. datahub/cli/specific/forms_cli.py +0 -4
  30. datahub/cli/specific/group_cli.py +0 -2
  31. datahub/cli/specific/structuredproperties_cli.py +1 -4
  32. datahub/cli/specific/user_cli.py +0 -2
  33. datahub/cli/state_cli.py +0 -2
  34. datahub/cli/timeline_cli.py +0 -2
  35. datahub/configuration/pydantic_migration_helpers.py +7 -5
  36. datahub/emitter/rest_emitter.py +70 -12
  37. datahub/entrypoints.py +4 -3
  38. datahub/ingestion/api/decorators.py +15 -3
  39. datahub/ingestion/api/report.py +332 -3
  40. datahub/ingestion/api/sink.py +3 -0
  41. datahub/ingestion/api/source.py +48 -44
  42. datahub/ingestion/autogenerated/__init__.py +0 -0
  43. datahub/ingestion/autogenerated/capability_summary.json +3449 -0
  44. datahub/ingestion/autogenerated/lineage.json +401 -0
  45. datahub/ingestion/autogenerated/lineage_helper.py +177 -0
  46. datahub/ingestion/extractor/schema_util.py +13 -4
  47. datahub/ingestion/glossary/classification_mixin.py +5 -0
  48. datahub/ingestion/graph/client.py +100 -15
  49. datahub/ingestion/graph/config.py +1 -0
  50. datahub/ingestion/reporting/datahub_ingestion_run_summary_provider.py +20 -10
  51. datahub/ingestion/run/pipeline.py +54 -2
  52. datahub/ingestion/sink/datahub_rest.py +13 -0
  53. datahub/ingestion/source/abs/source.py +1 -1
  54. datahub/ingestion/source/aws/aws_common.py +4 -0
  55. datahub/ingestion/source/aws/glue.py +489 -244
  56. datahub/ingestion/source/aws/tag_entities.py +292 -0
  57. datahub/ingestion/source/azure/azure_common.py +2 -2
  58. datahub/ingestion/source/bigquery_v2/bigquery.py +50 -23
  59. datahub/ingestion/source/bigquery_v2/bigquery_config.py +1 -1
  60. datahub/ingestion/source/bigquery_v2/bigquery_queries.py +1 -0
  61. datahub/ingestion/source/bigquery_v2/bigquery_schema_gen.py +2 -0
  62. datahub/ingestion/source/bigquery_v2/common.py +1 -1
  63. datahub/ingestion/source/bigquery_v2/profiler.py +4 -2
  64. datahub/ingestion/source/bigquery_v2/queries.py +3 -3
  65. datahub/ingestion/source/cassandra/cassandra.py +1 -1
  66. datahub/ingestion/source/cassandra/cassandra_profiling.py +6 -5
  67. datahub/ingestion/source/common/subtypes.py +45 -0
  68. datahub/ingestion/source/data_lake_common/object_store.py +115 -27
  69. datahub/ingestion/source/data_lake_common/path_spec.py +10 -21
  70. datahub/ingestion/source/datahub/datahub_database_reader.py +1 -2
  71. datahub/ingestion/source/dbt/dbt_cloud.py +10 -2
  72. datahub/ingestion/source/dbt/dbt_common.py +6 -2
  73. datahub/ingestion/source/dbt/dbt_core.py +3 -0
  74. datahub/ingestion/source/debug/__init__.py +0 -0
  75. datahub/ingestion/source/debug/datahub_debug.py +300 -0
  76. datahub/ingestion/source/dremio/dremio_api.py +114 -73
  77. datahub/ingestion/source/dremio/dremio_config.py +2 -0
  78. datahub/ingestion/source/dremio/dremio_reporting.py +23 -2
  79. datahub/ingestion/source/dremio/dremio_source.py +94 -81
  80. datahub/ingestion/source/dremio/dremio_sql_queries.py +82 -21
  81. datahub/ingestion/source/file.py +3 -0
  82. datahub/ingestion/source/fivetran/fivetran.py +34 -26
  83. datahub/ingestion/source/gcs/gcs_source.py +13 -2
  84. datahub/ingestion/source/ge_data_profiler.py +76 -28
  85. datahub/ingestion/source/ge_profiling_config.py +11 -0
  86. datahub/ingestion/source/hex/api.py +26 -1
  87. datahub/ingestion/source/iceberg/iceberg.py +3 -1
  88. datahub/ingestion/source/identity/azure_ad.py +1 -1
  89. datahub/ingestion/source/identity/okta.py +1 -14
  90. datahub/ingestion/source/kafka/kafka.py +16 -0
  91. datahub/ingestion/source/kafka_connect/sink_connectors.py +156 -47
  92. datahub/ingestion/source/kafka_connect/source_connectors.py +59 -4
  93. datahub/ingestion/source/looker/looker_source.py +1 -0
  94. datahub/ingestion/source/mlflow.py +11 -1
  95. datahub/ingestion/source/mock_data/__init__.py +0 -0
  96. datahub/ingestion/source/mock_data/datahub_mock_data.py +507 -0
  97. datahub/ingestion/source/mock_data/datahub_mock_data_report.py +12 -0
  98. datahub/ingestion/source/mock_data/table_naming_helper.py +97 -0
  99. datahub/ingestion/source/nifi.py +1 -1
  100. datahub/ingestion/source/powerbi/powerbi.py +1 -5
  101. datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py +0 -1
  102. datahub/ingestion/source/powerbi_report_server/report_server.py +0 -23
  103. datahub/ingestion/source/preset.py +2 -2
  104. datahub/ingestion/source/qlik_sense/qlik_sense.py +1 -0
  105. datahub/ingestion/source/redshift/redshift.py +21 -1
  106. datahub/ingestion/source/redshift/usage.py +4 -3
  107. datahub/ingestion/source/s3/report.py +4 -2
  108. datahub/ingestion/source/s3/source.py +367 -115
  109. datahub/ingestion/source/sac/sac.py +3 -1
  110. datahub/ingestion/source/salesforce.py +6 -3
  111. datahub/ingestion/source/sigma/sigma.py +7 -1
  112. datahub/ingestion/source/slack/slack.py +2 -1
  113. datahub/ingestion/source/snowflake/snowflake_config.py +30 -7
  114. datahub/ingestion/source/snowflake/snowflake_queries.py +348 -82
  115. datahub/ingestion/source/snowflake/snowflake_summary.py +5 -0
  116. datahub/ingestion/source/snowflake/snowflake_usage_v2.py +8 -2
  117. datahub/ingestion/source/snowflake/snowflake_utils.py +2 -7
  118. datahub/ingestion/source/snowflake/snowflake_v2.py +16 -2
  119. datahub/ingestion/source/snowflake/stored_proc_lineage.py +143 -0
  120. datahub/ingestion/source/sql/athena.py +119 -11
  121. datahub/ingestion/source/sql/athena_properties_extractor.py +777 -0
  122. datahub/ingestion/source/sql/clickhouse.py +3 -1
  123. datahub/ingestion/source/sql/cockroachdb.py +0 -1
  124. datahub/ingestion/source/sql/hana.py +3 -1
  125. datahub/ingestion/source/sql/hive_metastore.py +3 -11
  126. datahub/ingestion/source/sql/mariadb.py +0 -1
  127. datahub/ingestion/source/sql/mssql/source.py +239 -34
  128. datahub/ingestion/source/sql/mysql.py +0 -1
  129. datahub/ingestion/source/sql/oracle.py +1 -1
  130. datahub/ingestion/source/sql/postgres.py +0 -1
  131. datahub/ingestion/source/sql/sql_common.py +121 -34
  132. datahub/ingestion/source/sql/sql_generic_profiler.py +2 -1
  133. datahub/ingestion/source/sql/teradata.py +997 -235
  134. datahub/ingestion/source/sql/vertica.py +10 -6
  135. datahub/ingestion/source/sql_queries.py +2 -2
  136. datahub/ingestion/source/state/stateful_ingestion_base.py +1 -1
  137. datahub/ingestion/source/superset.py +58 -3
  138. datahub/ingestion/source/tableau/tableau.py +58 -37
  139. datahub/ingestion/source/tableau/tableau_common.py +4 -2
  140. datahub/ingestion/source/tableau/tableau_constant.py +0 -4
  141. datahub/ingestion/source/unity/config.py +5 -0
  142. datahub/ingestion/source/unity/proxy.py +118 -0
  143. datahub/ingestion/source/unity/source.py +195 -17
  144. datahub/ingestion/source/unity/tag_entities.py +295 -0
  145. datahub/ingestion/source/usage/clickhouse_usage.py +4 -1
  146. datahub/ingestion/source/usage/starburst_trino_usage.py +3 -0
  147. datahub/ingestion/transformer/add_dataset_ownership.py +18 -2
  148. datahub/integrations/assertion/snowflake/compiler.py +4 -3
  149. datahub/metadata/_internal_schema_classes.py +1522 -569
  150. datahub/metadata/_urns/urn_defs.py +1826 -1658
  151. datahub/metadata/com/linkedin/pegasus2avro/application/__init__.py +19 -0
  152. datahub/metadata/com/linkedin/pegasus2avro/identity/__init__.py +2 -0
  153. datahub/metadata/com/linkedin/pegasus2avro/logical/__init__.py +15 -0
  154. datahub/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py +4 -0
  155. datahub/metadata/com/linkedin/pegasus2avro/module/__init__.py +29 -0
  156. datahub/metadata/com/linkedin/pegasus2avro/settings/global/__init__.py +4 -0
  157. datahub/metadata/com/linkedin/pegasus2avro/template/__init__.py +25 -0
  158. datahub/metadata/schema.avsc +17758 -17097
  159. datahub/metadata/schemas/ApplicationKey.avsc +31 -0
  160. datahub/metadata/schemas/ApplicationProperties.avsc +72 -0
  161. datahub/metadata/schemas/Applications.avsc +38 -0
  162. datahub/metadata/schemas/ChartKey.avsc +1 -0
  163. datahub/metadata/schemas/ContainerKey.avsc +1 -0
  164. datahub/metadata/schemas/ContainerProperties.avsc +8 -0
  165. datahub/metadata/schemas/CorpUserSettings.avsc +41 -0
  166. datahub/metadata/schemas/DashboardKey.avsc +1 -0
  167. datahub/metadata/schemas/DataFlowInfo.avsc +8 -0
  168. datahub/metadata/schemas/DataFlowKey.avsc +1 -0
  169. datahub/metadata/schemas/DataHubPageModuleKey.avsc +21 -0
  170. datahub/metadata/schemas/DataHubPageModuleProperties.avsc +237 -0
  171. datahub/metadata/schemas/DataHubPageTemplateKey.avsc +21 -0
  172. datahub/metadata/schemas/DataHubPageTemplateProperties.avsc +175 -0
  173. datahub/metadata/schemas/DataHubPolicyInfo.avsc +12 -1
  174. datahub/metadata/schemas/DataJobInfo.avsc +8 -0
  175. datahub/metadata/schemas/DataJobKey.avsc +1 -0
  176. datahub/metadata/schemas/DataProcessKey.avsc +8 -0
  177. datahub/metadata/schemas/DataProductKey.avsc +1 -0
  178. datahub/metadata/schemas/DataProductProperties.avsc +1 -1
  179. datahub/metadata/schemas/DatasetKey.avsc +11 -1
  180. datahub/metadata/schemas/GlobalSettingsInfo.avsc +62 -0
  181. datahub/metadata/schemas/GlossaryTermKey.avsc +1 -0
  182. datahub/metadata/schemas/IcebergWarehouseInfo.avsc +8 -0
  183. datahub/metadata/schemas/LogicalParent.avsc +140 -0
  184. datahub/metadata/schemas/MLFeatureKey.avsc +1 -0
  185. datahub/metadata/schemas/MLFeatureTableKey.avsc +1 -0
  186. datahub/metadata/schemas/MLModelDeploymentKey.avsc +8 -0
  187. datahub/metadata/schemas/MLModelGroupKey.avsc +9 -0
  188. datahub/metadata/schemas/MLModelKey.avsc +9 -0
  189. datahub/metadata/schemas/MLPrimaryKeyKey.avsc +1 -0
  190. datahub/metadata/schemas/MetadataChangeEvent.avsc +20 -1
  191. datahub/metadata/schemas/NotebookKey.avsc +1 -0
  192. datahub/metadata/schemas/QuerySubjects.avsc +1 -12
  193. datahub/metadata/schemas/SchemaFieldKey.avsc +2 -1
  194. datahub/metadata/schemas/__init__.py +3 -3
  195. datahub/sdk/__init__.py +2 -0
  196. datahub/sdk/_all_entities.py +7 -0
  197. datahub/sdk/_shared.py +116 -0
  198. datahub/sdk/chart.py +315 -0
  199. datahub/sdk/container.py +7 -0
  200. datahub/sdk/dashboard.py +432 -0
  201. datahub/sdk/dataflow.py +7 -0
  202. datahub/sdk/datajob.py +45 -13
  203. datahub/sdk/dataset.py +8 -2
  204. datahub/sdk/entity_client.py +82 -2
  205. datahub/sdk/lineage_client.py +683 -82
  206. datahub/sdk/main_client.py +46 -16
  207. datahub/sdk/mlmodel.py +101 -38
  208. datahub/sdk/mlmodelgroup.py +7 -0
  209. datahub/sdk/search_client.py +4 -3
  210. datahub/sdk/search_filters.py +95 -27
  211. datahub/specific/chart.py +1 -1
  212. datahub/specific/dataproduct.py +4 -0
  213. datahub/sql_parsing/sql_parsing_aggregator.py +29 -17
  214. datahub/sql_parsing/sqlglot_lineage.py +62 -13
  215. datahub/telemetry/telemetry.py +17 -11
  216. datahub/testing/sdk_v2_helpers.py +7 -1
  217. datahub/upgrade/upgrade.py +56 -14
  218. datahub/utilities/server_config_util.py +8 -0
  219. datahub/utilities/sqlalchemy_query_combiner.py +5 -2
  220. datahub/utilities/stats_collections.py +4 -0
  221. {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.2.0.1.dist-info}/WHEEL +0 -0
  222. {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.2.0.1.dist-info}/licenses/LICENSE +0 -0
  223. {acryl_datahub-1.1.1rc4.dist-info → acryl_datahub-1.2.0.1.dist-info}/top_level.txt +0 -0

datahub/ingestion/source/mock_data/datahub_mock_data.py (new file)
@@ -0,0 +1,507 @@
+import logging
+from typing import Dict, Iterable, List, Optional, Tuple
+
+from pydantic import Field
+
+from datahub.configuration.common import ConfigModel
+from datahub.emitter.mce_builder import make_dataset_urn
+from datahub.emitter.mcp import MetadataChangeProposalWrapper
+from datahub.ingestion.api.common import PipelineContext
+from datahub.ingestion.api.decorators import (
+    SupportStatus,
+    config_class,
+    platform_name,
+    support_status,
+)
+from datahub.ingestion.api.source import Source, SourceReport
+from datahub.ingestion.api.workunit import MetadataWorkUnit
+from datahub.ingestion.source.common.subtypes import DatasetSubTypes
+from datahub.ingestion.source.mock_data.datahub_mock_data_report import (
+    DataHubMockDataReport,
+)
+from datahub.ingestion.source.mock_data.table_naming_helper import TableNamingHelper
+from datahub.metadata.schema_classes import (
+    CalendarIntervalClass,
+    DatasetLineageTypeClass,
+    DatasetProfileClass,
+    DatasetUsageStatisticsClass,
+    StatusClass,
+    SubTypesClass,
+    TimeWindowSizeClass,
+    UpstreamClass,
+    UpstreamLineageClass,
+)
+from datahub.utilities.str_enum import StrEnum
+
+logger = logging.getLogger(__name__)
+
+
+class SubTypePattern(StrEnum):
+    ALTERNATING = "alternating"
+    ALL_TABLE = "all_table"
+    ALL_VIEW = "all_view"
+    LEVEL_BASED = "level_based"
+
+
+class LineageConfigGen1(ConfigModel):
+    """
+    Configuration for generating mock lineage data for testing purposes.
+
+    This configuration controls how the mock data source generates a hierarchical
+    lineage graph with multiple levels of upstream/downstream relationships.
+
+    The lineage graph is structured as follows:
+    - Level 0: 1 table (root)
+    - Level 1: lineage_fan_out tables (each connected to the root)
+    - Level 2+: If lineage_fan_out_after_first_hop is set, uses that value;
+      otherwise uses lineage_fan_out^level tables (each connected to a level 1 table)
+    - ... and so on for lineage_hops levels
+
+    Examples:
+    - With lineage_fan_out=2, lineage_hops=1: Creates 3 tables total
+      (1 root + 2 downstream) with 2 lineage relationships
+    - With lineage_fan_out=3, lineage_hops=2: Creates 13 tables total
+      (1 + 3 + 9) with 12 lineage relationships
+    - With lineage_fan_out=4, lineage_hops=1: Creates 5 tables total
+      (1 + 4) with 4 lineage relationships
+    - With lineage_fan_out=3, lineage_hops=3, lineage_fan_out_after_first_hop=2:
+      Creates 1 + 3 + 6 + 12 = 22 tables total (prevents exponential growth)
+
+    Table naming convention: "hops_{lineage_hops}_f_{lineage_fan_out}_h{level}_t{table_index}"
+    """
+
+    enabled: bool = Field(
+        default=False,
+        description="Whether this source is enabled",
+    )
+
+    table_name_prefix: Optional[str] = Field(
+        default=None,
+        description="Prefix to add to the table name. This is useful for testing purposes.",
+    )
+
+    emit_lineage: bool = Field(
+        default=True,
+        description="Whether to emit lineage data for testing purposes. When False, no lineage data is generated regardless of other settings.",
+    )
+    emit_usage: bool = Field(
+        default=True,
+        description="Whether to emit usage data for testing purposes. When False, no usage data is generated regardless of other settings.",
+    )
+
+    lineage_fan_out: int = Field(
+        default=3,
+        description="Number of downstream tables that each upstream table connects to. This controls the 'width' of the lineage graph. Higher values create more parallel downstream tables per level.",
+    )
+
+    lineage_hops: int = Field(
+        default=2,
+        description="Number of hops (levels) in the lineage graph. This controls the 'depth' of the lineage graph. Level 0 is the root table, and each subsequent level contains downstream tables. Higher values create deeper lineage chains.",
+    )
+
+    lineage_fan_out_after_first_hop: Optional[int] = Field(
+        default=None,
+        description="Optional limit on fanout for hops after the first hop. When set, prevents exponential growth by limiting the number of downstream tables per upstream table at levels 2 and beyond. When None, uses the standard exponential growth (lineage_fan_out^level).",
+    )
+
+    subtype_pattern: SubTypePattern = Field(
+        default=SubTypePattern.ALTERNATING,
+        description="Pattern for determining SubTypes. Options: 'alternating', 'all_table', 'all_view', 'level_based'",
+    )
+
+    subtype_types: List[str] = Field(
+        default=["Table", "View"],
+        description="List of types to use in alternating pattern. Defaults to ['Table', 'View'].",
+    )
+
+    level_subtypes: Dict[int, str] = Field(
+        default={0: "Table", 1: "View", 2: "Table"},
+        description="Mapping of level to subtype for level_based pattern",
+    )
+
+
+class DataHubMockDataConfig(ConfigModel):
+    enabled: bool = Field(
+        default=True,
+        description="Whether this source is enabled",
+    )
+    throw_uncaught_exceptions: bool = Field(
+        default=False,
+        description="Whether to throw an uncaught exception for testing",
+    )
+    num_errors: int = Field(
+        default=0,
+        description="Number of errors to add in report for testing",
+    )
+    num_warnings: int = Field(
+        default=0,
+        description="Number of warnings to add in report for testing",
+    )
+
+    gen_1: LineageConfigGen1 = Field(
+        default_factory=LineageConfigGen1,
+        description="Configuration for lineage data generation",
+    )
+
+
+@platform_name("DataHubMockData")
+@config_class(DataHubMockDataConfig)
+@support_status(SupportStatus.TESTING)
+class DataHubMockDataSource(Source):
+    """
+    This source is for generating mock data for testing purposes.
+    Expect breaking changes as we iterate on the mock data source.
+    """
+
+    def __init__(self, ctx: PipelineContext, config: DataHubMockDataConfig):
+        self.ctx = ctx
+        self.config = config
+        self.report = DataHubMockDataReport()
+
+    def get_workunits(self) -> Iterable[MetadataWorkUnit]:
+        if self.config.throw_uncaught_exceptions:
+            raise Exception("This is a test exception")
+
+        if self.config.num_errors > 0:
+            for i in range(self.config.num_errors):
+                self.report.failure(
+                    message="This is test error message",
+                    title="Test Error",
+                    context=f"This is test error {i}",
+                )
+
+        if self.config.num_warnings > 0:
+            for i in range(self.config.num_warnings):
+                self.report.warning(
+                    message="This is test warning",
+                    title="Test Warning",
+                    context=f"This is test warning {i}",
+                )
+
+        # We don't want any implicit aspects to be produced
+        # so we are not using get_workunits_internal
+        if self.config.gen_1.enabled:
+            for wu in self._data_gen_1():
+                if self.report.first_urn_seen is None:
+                    self.report.first_urn_seen = wu.get_urn()
+                self.report.report_workunit(wu)
+                yield wu
+
+        yield from []
+
+    def _calculate_lineage_tables(
+        self, fan_out: int, hops: int, fan_out_after_first: Optional[int] = None
+    ) -> Tuple[int, List[int]]:
+        """
+        Calculate the total number of tables and tables at each level for lineage generation.
+
+        Args:
+            fan_out: Number of downstream tables per upstream table at level 1
+            hops: Number of hops (levels) in the lineage graph
+            fan_out_after_first: Optional limit on fanout for hops after the first hop
+
+        Returns:
+            Tuple of (total_tables, tables_at_levels) where tables_at_levels is a list
+            containing the number of tables at each level (index 0 = level 0, etc.)
+        """
+        tables_to_be_created = 0
+        tables_at_levels: List[int] = []
+
+        for i in range(hops + 1):
+            if i == 0:
+                # Level 0: always 1 table
+                tables_at_level = 1
+            elif i == 1:
+                # Level 1: uses lineage_fan_out
+                tables_at_level = fan_out
+            else:
+                # Level 2+: use fan_out_after_first_hop if set, otherwise exponential growth
+                if fan_out_after_first is not None:
+                    # Each table at previous level creates fan_out_after_first tables
+                    tables_at_level = tables_at_levels[i - 1] * fan_out_after_first
+                else:
+                    # Original exponential behavior
+                    tables_at_level = fan_out**i
+
+            tables_at_levels.append(tables_at_level)
+            tables_to_be_created += tables_at_level
+
+        return tables_to_be_created, tables_at_levels
+
+    def _calculate_fanout_for_level(
+        self, level: int, fan_out: int, fan_out_after_first: Optional[int] = None
+    ) -> int:
+        """
+        Calculate the fanout (number of downstream tables) for a specific level.
+
+        Args:
+            level: The current level (0-based)
+            fan_out: Number of downstream tables per upstream table at level 1
+            fan_out_after_first: Optional limit on fanout for hops after the first hop
+
+        Returns:
+            The number of downstream tables that each table at this level should connect to
+        """
+        if level == 0:
+            # Level 0: uses the standard fan_out
+            return fan_out
+        else:
+            # Level 1+: use fan_out_after_first if set, otherwise use fan_out
+            return fan_out_after_first if fan_out_after_first is not None else fan_out
+
+    def _determine_subtype(
+        self,
+        table_level: int,
+        table_index: int,
+        subtype_pattern: SubTypePattern,
+        subtype_types: List[str],
+        level_subtypes: Dict[int, str],
+    ) -> str:
+        """
+        Determine subtype based on configured pattern.
+
+        Args:
+            table_level: Level of the table in the lineage graph
+            table_index: Index of the table within its level
+            subtype_pattern: Pattern for determining subtypes
+            subtype_types: List of types to use in alternating pattern
+            level_subtypes: Mapping of level to subtype for level_based pattern
+
+        Returns:
+            The determined subtype from the configured types
+        """
+        if subtype_pattern == SubTypePattern.ALTERNATING:
+            return subtype_types[table_index % len(subtype_types)]
+        elif subtype_pattern == SubTypePattern.LEVEL_BASED:
+            return level_subtypes.get(table_level, DatasetSubTypes.TABLE)
+        elif subtype_pattern == SubTypePattern.ALL_TABLE:
+            return DatasetSubTypes.TABLE
+        elif subtype_pattern == SubTypePattern.ALL_VIEW:
+            return DatasetSubTypes.VIEW
+        else:
+            return DatasetSubTypes.TABLE  # default
+
+    def _get_subtypes_aspect(
+        self,
+        table_name: str,
+        table_level: int,
+        table_index: int,
+        subtype_pattern: SubTypePattern,
+        subtype_types: List[str],
+        level_subtypes: Dict[int, str],
+    ) -> MetadataWorkUnit:
+        """
+        Create a SubTypes aspect for a table based on deterministic pattern.
+
+        Args:
+            table_name: Name of the table
+            table_level: Level of the table in the lineage graph
+            table_index: Index of the table within its level
+            subtype_pattern: Pattern for determining subtypes
+            subtype_types: List of types to use in alternating pattern
+            level_subtypes: Mapping of level to subtype for level_based pattern
+
+        Returns:
+            MetadataWorkUnit containing the SubTypes aspect
+        """
+        # Determine subtype based on pattern
+        subtype = self._determine_subtype(
+            table_level, table_index, subtype_pattern, subtype_types, level_subtypes
+        )
+
+        urn = make_dataset_urn(platform="fake", name=table_name)
+        mcp = MetadataChangeProposalWrapper(
+            entityUrn=urn,
+            entityType="dataset",
+            aspect=SubTypesClass(typeNames=[subtype]),
+        )
+        return mcp.as_workunit()
+
+    def _data_gen_1(self) -> Iterable[MetadataWorkUnit]:
+        """Generate mock lineage data for testing purposes."""
+        gen_1 = self.config.gen_1
+        fan_out = gen_1.lineage_fan_out
+        hops = gen_1.lineage_hops
+        fan_out_after_first = gen_1.lineage_fan_out_after_first_hop
+
+        logger.info(
+            f"Generating lineage data with fan_out={fan_out}, hops={hops}, fan_out_after_first={fan_out_after_first}"
+        )
+
+        tables_to_be_created, tables_at_levels = self._calculate_lineage_tables(
+            fan_out, hops, fan_out_after_first
+        )
+
+        logger.info(f"About to create {tables_to_be_created} datasets mock data")
+
+        for i in range(hops + 1):
+            tables_at_level = tables_at_levels[i]
+
+            for j in range(tables_at_level):
+                table_name = TableNamingHelper.generate_table_name(
+                    hops, fan_out, i, j, gen_1.table_name_prefix
+                )
+
+                yield self._get_status_aspect(table_name)
+
+                yield self._get_subtypes_aspect(
+                    table_name,
+                    i,
+                    j,
+                    gen_1.subtype_pattern,
+                    gen_1.subtype_types,
+                    gen_1.level_subtypes,
+                )
+
+                yield self._get_profile_aspect(table_name)
+
+                if self.config.gen_1.emit_usage:
+                    yield self._get_usage_aspect(table_name)
+
+                if self.config.gen_1.emit_lineage:
+                    yield from self._generate_lineage_for_table(
+                        table_name=table_name,
+                        table_level=i,
+                        table_index=j,
+                        hops=hops,
+                        fan_out=fan_out,
+                        fan_out_after_first=fan_out_after_first,
+                        tables_at_levels=tables_at_levels,
+                        table_name_prefix=gen_1.table_name_prefix,
+                    )
+
+    def _generate_lineage_for_table(
+        self,
+        table_name: str,
+        table_level: int,
+        table_index: int,
+        hops: int,
+        fan_out: int,
+        fan_out_after_first: Optional[int],
+        tables_at_levels: List[int],
+        table_name_prefix: Optional[str],
+    ) -> Iterable[MetadataWorkUnit]:
+        """Generate lineage relationships for a specific table."""
+        # Only generate lineage if there are downstream levels
+        if table_level + 1 > hops:
+            return
+
+        current_fan_out = self._calculate_fanout_for_level(
+            table_level, fan_out, fan_out_after_first
+        )
+
+        yield from self._generate_downstream_lineage(
+            upstream_table_name=table_name,
+            upstream_table_index=table_index,
+            upstream_table_level=table_level,
+            current_fan_out=current_fan_out,
+            hops=hops,
+            fan_out=fan_out,
+            tables_at_levels=tables_at_levels,
+            table_name_prefix=table_name_prefix,
+        )
+
+    def _generate_downstream_lineage(
+        self,
+        upstream_table_name: str,
+        upstream_table_index: int,
+        upstream_table_level: int,
+        current_fan_out: int,
+        hops: int,
+        fan_out: int,
+        tables_at_levels: List[int],
+        table_name_prefix: Optional[str],
+    ) -> Iterable[MetadataWorkUnit]:
+        """Generate lineage relationships to downstream tables."""
+        downstream_level = upstream_table_level + 1
+        downstream_tables_count = tables_at_levels[downstream_level]
+
+        # Calculate range of downstream tables this upstream table connects to
+        start_downstream = upstream_table_index * current_fan_out
+        end_downstream = min(
+            (upstream_table_index + 1) * current_fan_out, downstream_tables_count
+        )
+
+        for downstream_index in range(start_downstream, end_downstream):
+            downstream_table_name = TableNamingHelper.generate_table_name(
+                hops, fan_out, downstream_level, downstream_index, table_name_prefix
+            )
+            yield self._get_upstream_aspect(
+                upstream_table=upstream_table_name,
+                downstream_table=downstream_table_name,
+            )
+
+    def _get_status_aspect(self, table: str) -> MetadataWorkUnit:
+        urn = make_dataset_urn(
+            platform="fake",
+            name=table,
+        )
+        mcp = MetadataChangeProposalWrapper(
+            entityUrn=urn,
+            entityType="dataset",
+            aspect=StatusClass(removed=False),
+        )
+        return mcp.as_workunit()
+
+    def _get_upstream_aspect(
+        self, upstream_table: str, downstream_table: str
+    ) -> MetadataWorkUnit:
+        mcp = MetadataChangeProposalWrapper(
+            entityUrn=make_dataset_urn(
+                platform="fake",
+                name=downstream_table,
+            ),
+            entityType="dataset",
+            aspect=UpstreamLineageClass(
+                upstreams=[
+                    UpstreamClass(
+                        dataset=make_dataset_urn(
+                            platform="fake",
+                            name=upstream_table,
+                        ),
+                        type=DatasetLineageTypeClass.TRANSFORMED,
+                    )
+                ],
+            ),
+        )
+        return mcp.as_workunit()
+
+    def _get_profile_aspect(self, table: str) -> MetadataWorkUnit:
+        urn = make_dataset_urn(
+            platform="fake",
+            name=table,
+        )
+        mcp = MetadataChangeProposalWrapper(
+            entityUrn=urn,
+            entityType="dataset",
+            aspect=DatasetProfileClass(
+                timestampMillis=0,
+                rowCount=100,
+                columnCount=10,
+                sizeInBytes=1000,
+            ),
+        )
+        return mcp.as_workunit()
+
+    def _get_usage_aspect(self, table: str) -> MetadataWorkUnit:
+        urn = make_dataset_urn(
+            platform="fake",
+            name=table,
+        )
+        mcp = MetadataChangeProposalWrapper(
+            entityUrn=urn,
+            entityType="dataset",
+            aspect=DatasetUsageStatisticsClass(
+                timestampMillis=0,
+                eventGranularity=TimeWindowSizeClass(unit=CalendarIntervalClass.DAY),
+                uniqueUserCount=0,
+                totalSqlQueries=0,
+                topSqlQueries=[],
+                userCounts=[],
+                fieldCounts=[],
+            ),
+        )
+        return mcp.as_workunit()
+
+    def get_report(self) -> SourceReport:
+        return self.report
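
The per-level arithmetic in _calculate_lineage_tables can be sanity-checked against the docstring examples with a few lines of standalone Python. A minimal sketch (tables_per_level is a hypothetical helper, not part of the package) that mirrors the level-0 / level-1 / level-2+ branching above:

from typing import List, Optional, Tuple

def tables_per_level(
    fan_out: int, hops: int, fan_out_after_first: Optional[int] = None
) -> Tuple[int, List[int]]:
    # Level 0 is a single root; level 1 uses the full fan-out; deeper levels
    # either grow exponentially or are capped by fan_out_after_first.
    levels: List[int] = []
    for i in range(hops + 1):
        if i == 0:
            levels.append(1)
        elif i == 1:
            levels.append(fan_out)
        elif fan_out_after_first is not None:
            levels.append(levels[i - 1] * fan_out_after_first)
        else:
            levels.append(fan_out**i)
    return sum(levels), levels

# These match the LineageConfigGen1 docstring examples.
assert tables_per_level(2, 1) == (3, [1, 2])
assert tables_per_level(3, 2) == (13, [1, 3, 9])
assert tables_per_level(3, 3, fan_out_after_first=2) == (22, [1, 3, 6, 12])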

datahub/ingestion/source/mock_data/datahub_mock_data_report.py (new file)
@@ -0,0 +1,12 @@
+from dataclasses import dataclass, field
+from typing import Optional
+
+from datahub.ingestion.api.source import SourceReport
+
+
+@dataclass
+class DataHubMockDataReport(SourceReport):
+    first_urn_seen: Optional[str] = field(
+        default=None,
+        metadata={"description": "The first URN encountered during ingestion"},
+    )

datahub/ingestion/source/mock_data/table_naming_helper.py (new file)
@@ -0,0 +1,97 @@
+from typing import Dict, Optional
+
+
+class TableNamingHelper:
+    """
+    Helper class for managing table naming conventions in mock data generation.
+
+    Table naming pattern: "hops_{lineage_hops}_f_{lineage_fan_out}_h{level}_t{table_index}"
+    """
+
+    @staticmethod
+    def generate_table_name(
+        lineage_hops: int,
+        lineage_fan_out: int,
+        level: int,
+        table_index: int,
+        prefix: Optional[str] = None,
+    ) -> str:
+        """
+        Generate a table name following the standard naming convention.
+
+        Args:
+            lineage_hops: Total number of hops in the lineage graph
+            lineage_fan_out: Number of downstream tables per upstream table
+            level: Level of the table in the lineage graph (0-based)
+            table_index: Index of the table within its level (0-based)
+            prefix: Optional prefix to add to the table name
+
+        Returns:
+            Table name following the pattern: "{prefix}hops_{lineage_hops}_f_{lineage_fan_out}_h{level}_t{table_index}"
+        """
+        base_name = f"hops_{lineage_hops}_f_{lineage_fan_out}_h{level}_t{table_index}"
+        return f"{prefix}{base_name}" if prefix else base_name
+
+    @staticmethod
+    def parse_table_name(table_name: str) -> Dict[str, int]:
+        """
+        Parse a table name to extract its components.
+
+        Args:
+            table_name: Table name following the standard naming convention
+
+        Returns:
+            Dictionary containing parsed components:
+            - lineage_hops: Total number of hops in the lineage graph
+            - lineage_fan_out: Number of downstream tables per upstream table
+            - level: Level of the table in the lineage graph (0-based)
+            - table_index: Index of the table within its level (0-based)
+
+        Raises:
+            ValueError: If the table name doesn't follow the expected pattern
+        """
+        try:
+            # Expected pattern: "hops_{lineage_hops}_f_{lineage_fan_out}_h{level}_t{table_index}"
+            parts = table_name.split("_")
+
+            if (
+                len(parts) != 6
+                or parts[0] != "hops"
+                or parts[2] != "f"
+                or not parts[4].startswith("h")
+                or not parts[5].startswith("t")
+            ):
+                raise ValueError(f"Invalid table name format: {table_name}")
+
+            lineage_hops = int(parts[1])
+            lineage_fan_out = int(parts[3])  # lineage_fan_out is at index 3
+            level = int(parts[4][1:])  # Remove 'h' prefix from parts[4]
+            table_index = int(parts[5][1:])  # Remove 't' prefix from parts[5]
+
+            return {
+                "lineage_hops": lineage_hops,
+                "lineage_fan_out": lineage_fan_out,
+                "level": level,
+                "table_index": table_index,
+            }
+        except (ValueError, IndexError) as e:
+            raise ValueError(
+                f"Failed to parse table name '{table_name}': {str(e)}"
+            ) from e
+
+    @staticmethod
+    def is_valid_table_name(table_name: str) -> bool:
+        """
+        Check if a table name follows the expected naming convention.
+
+        Args:
+            table_name: Table name to validate
+
+        Returns:
+            True if the table name follows the expected pattern, False otherwise
+        """
+        try:
+            TableNamingHelper.parse_table_name(table_name)
+            return True
+        except ValueError:
+            return False
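
A quick round trip through the helper shows the naming convention in action (the values are illustrative):

from datahub.ingestion.source.mock_data.table_naming_helper import TableNamingHelper

# Table #5 at level 2 of a graph with 3 hops and fan-out 4.
name = TableNamingHelper.generate_table_name(
    lineage_hops=3, lineage_fan_out=4, level=2, table_index=5
)
assert name == "hops_3_f_4_h2_t5"
assert TableNamingHelper.parse_table_name(name) == {
    "lineage_hops": 3,
    "lineage_fan_out": 4,
    "level": 2,
    "table_index": 5,
}

# Prefixed names generate fine but do not parse back, because parse_table_name
# requires the name to start with "hops" and split into exactly six "_"-separated parts.
prefixed = TableNamingHelper.generate_table_name(3, 4, 2, 5, prefix="demo_")
assert prefixed == "demo_hops_3_f_4_h2_t5"
assert not TableNamingHelper.is_valid_table_name(prefixed)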

datahub/ingestion/source/nifi.py
@@ -72,7 +72,7 @@ NIFI = "nifi"
 # and here - https://github.com/psf/requests/issues/1573
 class SSLAdapter(HTTPAdapter):
     def __init__(self, certfile, keyfile, password=None):
-        self.context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
+        self.context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
         self.context.load_cert_chain(
            certfile=certfile, keyfile=keyfile, password=password
         )
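
The one-line nifi.py change is a correctness fix: ssl.Purpose.CLIENT_AUTH builds a context for the server side of a connection, while ssl.Purpose.SERVER_AUTH is the right purpose for a client that verifies the server's certificate (and, as here, also presents its own client certificate). A minimal standalone sketch of the same adapter pattern, with illustrative file paths:

import ssl

import requests
from requests.adapters import HTTPAdapter

class ClientCertAdapter(HTTPAdapter):
    """Requests adapter whose TLS context presents a client certificate."""

    def __init__(self, certfile, keyfile, password=None, **kwargs):
        # SERVER_AUTH: this context belongs to a client socket that
        # authenticates the remote server.
        self.context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
        self.context.load_cert_chain(
            certfile=certfile, keyfile=keyfile, password=password
        )
        super().__init__(**kwargs)

    def init_poolmanager(self, *args, **kwargs):
        # Hand the custom context to urllib3's pool manager.
        kwargs["ssl_context"] = self.context
        return super().init_poolmanager(*args, **kwargs)

session = requests.Session()
session.mount(
    "https://", ClientCertAdapter("/path/to/client.pem", "/path/to/client.key")
)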

datahub/ingestion/source/powerbi/powerbi.py
@@ -294,8 +294,6 @@ class Mapper:
         logger.debug(f"Dataset urn = {ds_urn} and its lineage = {upstream_lineage}")

         mcp = MetadataChangeProposalWrapper(
-            entityType=Constant.DATASET,
-            changeType=ChangeTypeClass.UPSERT,
             entityUrn=ds_urn,
             aspect=upstream_lineage_class,
         )
@@ -538,9 +536,7 @@ class Mapper:
         profile.columnCount = table.column_count

         mcp = MetadataChangeProposalWrapper(
-            entityType="dataset",
             entityUrn=ds_urn,
-            aspectName="datasetProfile",
             aspect=profile,
         )
         dataset_mcps.append(mcp)
@@ -796,7 +792,6 @@ class Mapper:
             guid=container_key.guid(),
         )
         mcp = MetadataChangeProposalWrapper(
-            changeType=ChangeTypeClass.UPSERT,
             entityUrn=entity_urn,
             aspect=ContainerClass(container=f"{container_urn}"),
         )
@@ -1253,6 +1248,7 @@ class Mapper:
     SourceCapability.DATA_PROFILING,
     "Optionally enabled via configuration profiling.enabled",
 )
+@capability(SourceCapability.TEST_CONNECTION, "Enabled by default")
 class PowerBiDashboardSource(StatefulIngestionSourceBase, TestableSource):
     """
     This plugin extracts the following:
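
The three Mapper hunks above all make the same simplification: MetadataChangeProposalWrapper infers entityType from the entity URN, derives aspectName from the aspect instance, and defaults changeType to UPSERT, so only entityUrn and aspect need to be passed explicitly. A minimal sketch of the equivalent construction (the URN is illustrative):

from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.metadata.schema_classes import StatusClass

# entityType ("dataset"), aspectName ("status"), and changeType (UPSERT)
# are all inferred from the URN and the aspect instance.
mcp = MetadataChangeProposalWrapper(
    entityUrn="urn:li:dataset:(urn:li:dataPlatform:fake,example_table,PROD)",
    aspect=StatusClass(removed=False),
)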

datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py
@@ -673,7 +673,6 @@ class PowerBiAPI:
         fill_dashboard_tags()
         self._fill_independent_datasets(workspace=workspace)

-    # flake8: noqa: C901
     def fill_workspaces(
         self, workspaces: List[Workspace], reporter: PowerBiDashboardSourceReport
     ) -> Iterable[Workspace]: