acryl-datahub 1.2.0.7rc3__py3-none-any.whl → 1.2.0.7rc4__py3-none-any.whl

This diff shows the content of publicly available package versions as released to their public registries, and is provided for informational purposes only.

This release of acryl-datahub has been flagged as potentially problematic.

@@ -0,0 +1,321 @@
+"""
+Proxy authentication patch for databricks-sql < 3.0 compatibility.
+
+This module provides proxy authentication fixes for databricks-sql connector < 3.0
+to resolve "407 Proxy Authentication Required" errors that occur even when
+proxy environment variables are correctly set.
+
+The patch implements the same fix as Databricks PR #354:
+https://github.com/databricks/databricks-sql-python/pull/354
+"""
+
+import logging
+import os
+import urllib.parse
+from typing import Dict, Optional
+
+logger: logging.Logger = logging.getLogger(__name__)
+
+PROXY_VARS = ["HTTP_PROXY", "HTTPS_PROXY", "http_proxy", "https_proxy"]
+
+
+def mask_proxy_credentials(url: Optional[str]) -> str:
+    """Mask credentials in proxy URL for safe logging."""
+    if not url:
+        return "None"
+
+    try:
+        parsed = urllib.parse.urlparse(url)
+        if parsed.username:
+            # Replace credentials with masked version
+            masked_netloc = parsed.netloc
+            if parsed.username and parsed.password:
+                masked_netloc = masked_netloc.replace(
+                    f"{parsed.username}:{parsed.password}@", f"{parsed.username}:***@"
+                )
+            elif parsed.username:
+                masked_netloc = masked_netloc.replace(
+                    f"{parsed.username}@", f"{parsed.username}:***@"
+                )
+
+            return urllib.parse.urlunparse(
+                (
+                    parsed.scheme,
+                    masked_netloc,
+                    parsed.path,
+                    parsed.params,
+                    parsed.query,
+                    parsed.fragment,
+                )
+            )
+        else:
+            return url
+    except Exception:
+        return "***INVALID_URL***"
+
+
+def _ensure_thrift_imports():
+    """Ensure required thrift imports are loaded before accessing thrift_http_client.
+
+    The databricks-sql thrift_http_client requires thrift.transport.THttpClient.THttpClient
+    to be accessible. This is achieved by importing the required modules in the right order.
+    """
+    try:
+        # Import thrift submodules - this makes them accessible as attributes
+        import thrift.transport.THttpClient  # noqa: F401 # Used to make thrift.transport accessible
+
+        logger.debug("Successfully imported required thrift modules")
+    except Exception as e:
+        logger.debug(f"Could not import thrift modules: {e}")
+        raise
+
+
+def _log_proxy_environment():
+    """Log detected proxy environment variables for debugging."""
+    proxy_env_vars = {}
+    for var in PROXY_VARS:
+        value = os.environ.get(var)
+        if value:
+            masked_value = mask_proxy_credentials(value)
+            proxy_env_vars[var] = masked_value
+
+    if proxy_env_vars:
+        logger.info(f"Detected proxy environment variables: {proxy_env_vars}")
+    else:
+        logger.debug("No proxy environment variables detected")
+
+
+def _basic_proxy_auth_header(proxy_url: str) -> Optional[Dict[str, str]]:
+    """Create proxy authentication header using the same method as Databricks >= 3.0.
+
+    Based on the basic_proxy_auth_header method from databricks-sql-connector >= 3.0:
+    https://github.com/databricks/databricks-sql-python/pull/354
+    """
+    try:
+        from urllib3.util import make_headers
+
+        parsed = urllib.parse.urlparse(proxy_url)
+        if parsed.username and parsed.password:
+            # Code reused from https://github.com/databricks/databricks-sql-python/pull/354
+            # URL decode the username and password (same as Databricks method)
+            username = urllib.parse.unquote(parsed.username)
+            password = urllib.parse.unquote(parsed.password)
+            auth_string = f"{username}:{password}"
+
+            # Create proxy URL without credentials
+            proxy_host_port = f"{parsed.scheme}://{parsed.hostname}"
+            if parsed.port:
+                proxy_host_port += f":{parsed.port}"
+
+            # Code reused from https://github.com/databricks/databricks-sql-python/pull/354
+            # Use make_headers like the newer Databricks version does
+            proxy_headers = make_headers(proxy_basic_auth=auth_string)
+
+            return {
+                "proxy_url": proxy_host_port,
+                "proxy_headers": proxy_headers,
+                "auth_string": auth_string,  # Keep for backward compatibility with tests
+            }
+    except Exception as e:
+        logger.debug(f"Failed to create proxy auth header from URL {proxy_url}: {e}")
+
+    return None
+
+
+def _handle_proxy_connection(self, original_open, pool_kwargs):
+    """Handle proxy connection setup with authentication headers."""
+    from urllib3.poolmanager import ProxyManager
+
+    logger.info(f"Using proxy for connection to {self.host}:{self.port}")
+    proxy_uri = getattr(self, "proxy_uri", None)
+    logger.debug(
+        f"Proxy URI: {mask_proxy_credentials(proxy_uri) if proxy_uri else 'None'}"
+    )
+
+    # Compute proxy authentication headers properly (the bug fix!)
+    proxy_headers = None
+    proxy_env_found = None
+    for env_var in ["HTTPS_PROXY", "https_proxy", "HTTP_PROXY", "http_proxy"]:
+        proxy_url = os.environ.get(env_var)
+        if proxy_url:
+            logger.debug(
+                f"Found proxy URL in {env_var}: {mask_proxy_credentials(proxy_url)}"
+            )
+            auth_info = _basic_proxy_auth_header(proxy_url)
+            if auth_info:
+                proxy_headers = auth_info["proxy_headers"]
+                proxy_env_found = env_var
+                logger.debug(f"Successfully created proxy headers from {env_var}")
+                break
+            else:
+                logger.debug(
+                    f"No authentication info found in proxy URL from {env_var}"
+                )
+
+    if proxy_headers:
+        logger.info(f"Using proxy authentication headers from {proxy_env_found}")
+    else:
+        logger.warning(
+            "No proxy authentication headers could be created from environment variables"
+        )
+
+    proxy_manager = ProxyManager(
+        self.proxy_uri,
+        num_pools=1,
+        proxy_headers=proxy_headers,
+    )
+
+    # Validate proxy manager attributes
+    if not hasattr(self, "realhost") or not hasattr(self, "realport"):
+        logger.warning(
+            "THttpClient missing realhost/realport attributes, falling back to original"
+        )
+        return original_open(self)
+
+    # Set up the connection pool
+    self._THttpClient__pool = proxy_manager.connection_from_host(
+        host=self.realhost,
+        port=self.realport,
+        scheme=self.scheme,
+        pool_kwargs=pool_kwargs,  # type: ignore
+    )
+    logger.debug(f"Created proxy connection pool for {self.realhost}:{self.realport}")
+
+
+def _create_patched_open_method(original_open):
+    """Create the patched THttpClient.open method with proxy authentication fix."""
+
+    def patched_open(self):
+        """Patched version of THttpClient.open following databricks-sql >= 3.0 structure.
+
+        This is largely copied from the >= 3.0 implementation:
+        https://github.com/databricks/databricks-sql-python/pull/354/files
+        """
+        logger.debug(
+            f"Patched THttpClient.open called for host={getattr(self, 'host', 'unknown')}, scheme={getattr(self, 'scheme', 'unknown')}"
+        )
+
+        try:
+            # Validate required attributes
+            required_attrs = ["scheme", "host", "port", "max_connections"]
+            missing_attrs = [attr for attr in required_attrs if not hasattr(self, attr)]
+            if missing_attrs:
+                logger.warning(
+                    f"THttpClient missing required attributes: {missing_attrs}, falling back to original"
+                )
+                return original_open(self)
+
+            # Code structure reused from https://github.com/databricks/databricks-sql-python/pull/354
+            # Determine pool class based on scheme
+            if self.scheme == "http":
+                from urllib3 import HTTPConnectionPool
+
+                pool_class = HTTPConnectionPool
+            elif self.scheme == "https":
+                from urllib3 import HTTPSConnectionPool
+
+                pool_class = HTTPSConnectionPool
+            else:
+                logger.warning(
+                    f"Unknown scheme '{self.scheme}', falling back to original"
+                )
+                return original_open(self)
+
+            _pool_kwargs = {"maxsize": self.max_connections}
+            logger.debug(f"Pool kwargs: {_pool_kwargs}")
+
+            if self.using_proxy():
+                return _handle_proxy_connection(self, original_open, _pool_kwargs)
+            else:
+                logger.debug(f"Direct connection (no proxy) to {self.host}:{self.port}")
+                self._THttpClient__pool = pool_class(
+                    self.host, self.port, **_pool_kwargs
+                )
+
+            logger.debug("Patched THttpClient.open completed successfully")
+
+        except Exception as e:
+            logger.warning(
+                f"Error in proxy auth patch: {e}, falling back to original",
+                exc_info=True,
+            )
+            # Fallback to original implementation
+            try:
+                return original_open(self)
+            except Exception as fallback_error:
+                logger.error(
+                    f"Fallback to original THttpClient.open also failed: {fallback_error}",
+                    exc_info=True,
+                )
+                raise
+
+    return patched_open
+
+
+def apply_databricks_proxy_fix():
+    """Apply the databricks-sql < 3.0 proxy authentication fix at module import time.
+
+    This implements the same fix as Databricks PR #354 to resolve
+    "407 Proxy Authentication Required" errors that occur even when
+    all proxy environment variables are correctly set.
+
+    Note: This fix may not work with all thrift versions due to compatibility issues
+    between databricks-sql-connector 2.9.6 and newer thrift versions. The fix will
+    gracefully fail with a warning if thrift compatibility issues are detected.
+    The main SQL functionality will continue to work normally without this fix.
+    """
+    _log_proxy_environment()
+    logger.info("Applying databricks-sql proxy authentication fix...")
+
+    try:
+        _ensure_thrift_imports()
+        import databricks.sql.auth.thrift_http_client as thrift_http
+
+        # Store original method for fallback
+        original_open = getattr(thrift_http.THttpClient, "open", None)
+        if not original_open:
+            logger.warning("Could not find THttpClient.open method to patch")
+            return False
+
+        logger.debug(f"Found THttpClient.open method at {original_open}")
+
+        # Apply the patch
+        patched_open = _create_patched_open_method(original_open)
+        thrift_http.THttpClient.open = patched_open
+        logger.info("Successfully applied databricks-sql proxy authentication fix")

+        # Verify the patch was applied
+        current_method = getattr(thrift_http.THttpClient, "open", None)
+        if current_method == patched_open:
+            logger.debug(
+                "Patch verification successful: THttpClient.open is now the patched version"
+            )
+            return True
+        else:
+            logger.warning(
+                "Patch verification failed: THttpClient.open was not replaced correctly"
+            )
+            return False
+
+    except ImportError as e:
+        logger.debug(f"Could not import databricks-sql internals for proxy patch: {e}")
+        return False
+    except AttributeError as e:
+        if "thrift" in str(e).lower() and "transport" in str(e).lower():
+            warning_msg = (
+                f"Databricks-sql proxy authentication patch could not be applied due to thrift version incompatibility: {e}. "
+                "In most environments, the SQL connection will still work without this patch."
+            )
+            logger.warning(warning_msg)
+            # Import here to avoid circular imports
+            from datahub.utilities.global_warning_util import add_global_warning
+
+            add_global_warning(warning_msg)
+        else:
+            logger.error(
+                f"Failed to apply databricks-sql proxy patch: {e}", exc_info=True
+            )
+        return False
+    except Exception as e:
+        logger.error(f"Failed to apply databricks-sql proxy patch: {e}", exc_info=True)
+        return False
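The new module exposes two public helpers, apply_databricks_proxy_fix() and mask_proxy_credentials(). A minimal usage sketch follows; the module's file path is not shown in this hunk, so the sketch assumes both helpers are already imported from wherever the package places them:

import logging

logging.basicConfig(level=logging.DEBUG)

# apply_databricks_proxy_fix() returns True only when THttpClient.open was
# replaced and verified; it returns False when the databricks-sql or thrift
# internals cannot be imported or patched, and ingestion continues unpatched.
if apply_databricks_proxy_fix():
    print("Proxy auth patch active")
else:
    print("Patch not applied; databricks-sql keeps its original THttpClient.open")

# Proxy URLs are masked before they reach the logs:
print(mask_proxy_credentials("http://user:secret@proxy.internal:3128"))
# -> http://user:***@proxy.internal:3128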
@@ -257,6 +257,18 @@ class UnityCatalogSource(StatefulIngestionSourceBase, TestableSource):
         else:
             self.platform_resource_repository = None
 
+        if self.config._forced_disable_tag_extraction:
+            self.report.report_warning(
+                "Some features disabled because of configuration conflicts",
+                "Tag Extraction is disabled due to missing warehouse_id in config",
+            )
+
+        if self.config._forced_disable_hive_metastore_extraction:
+            self.report.report_warning(
+                "Some features disabled because of configuration conflicts",
+                "Hive Metastore Extraction is disabled due to missing warehouse_id in config",
+            )
+
         # Include platform resource repository in report for automatic cache statistics
         if self.config.include_tags and self.platform_resource_repository:
             self.report.tag_urn_resolver_cache = self.platform_resource_repository
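Both warnings trace back to a missing warehouse_id. As an illustrative sketch only (warehouse_id and include_tags are named in this hunk; the remaining keys are assumptions, not taken from this diff), a Unity Catalog source config that sets warehouse_id keeps both features enabled:

unity_catalog_source_config = {
    "workspace_url": "https://<workspace>.cloud.databricks.com",  # assumed field name
    "token": "<personal-access-token>",  # assumed field name
    "warehouse_id": "<sql-warehouse-id>",  # the field named in the warnings above
    "include_tags": True,
}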
@@ -268,6 +268,7 @@ class UsageAggregator(Generic[ResourceType]):
                 user,
                 query,
                 fields,
+                user_email_pattern=self.config.user_email_pattern,
                 count=count,
             )
 
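The added user_email_pattern argument forwards the source's email filter into usage aggregation. A minimal sketch of the filtering it enables, assuming the pattern is DataHub's AllowDenyPattern from datahub.configuration.common:

from datahub.configuration.common import AllowDenyPattern

# Illustrative pattern values; the real ones come from the source's
# user_email_pattern config.
pattern = AllowDenyPattern(deny=[r".*@serviceaccounts\.example\.com"])

assert pattern.allowed("analyst@example.com")
assert not pattern.allowed("etl-bot@serviceaccounts.example.com")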
datahub/sdk/chart.py CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 from datetime import datetime
 from typing import Dict, List, Optional, Type, Union
 
+from deprecated.sphinx import deprecated
 from typing_extensions import Self
 
 import datahub.metadata.schema_classes as models
@@ -23,11 +24,13 @@ from datahub.sdk._shared import (
     HasTerms,
     LinksInputType,
     OwnersInputType,
+    ParentContainerInputType,
     TagsInputType,
     TermsInputType,
 )
 from datahub.sdk.dataset import Dataset
 from datahub.sdk.entity import Entity, ExtraAspectsType
+from datahub.utilities.sentinels import Unset, unset
 
 
 class Chart(
@@ -70,14 +73,15 @@ class Chart(
         last_refreshed: Optional[datetime] = None,
         chart_type: Optional[Union[str, models.ChartTypeClass]] = None,
         access: Optional[str] = None,
+        input_datasets: Optional[List[Union[DatasetUrnOrStr, Dataset]]] = None,
         # Standard aspects.
+        parent_container: ParentContainerInputType | Unset = unset,
         subtype: Optional[str] = None,
         owners: Optional[OwnersInputType] = None,
         links: Optional[LinksInputType] = None,
         tags: Optional[TagsInputType] = None,
         terms: Optional[TermsInputType] = None,
         domain: Optional[DomainInputType] = None,
-        input_datasets: Optional[List[Union[DatasetUrnOrStr, Dataset]]] = None,
         extra_aspects: ExtraAspectsType = None,
     ):
         """Initialize a new Chart instance."""
@@ -91,19 +95,31 @@ class Chart(
 
         self._set_platform_instance(platform, platform_instance)
 
-        # Set additional properties
+        self._ensure_chart_props(display_name=display_name)
+
+        if display_name is not None:
+            self.set_display_name(display_name)
+        if description is not None:
+            self.set_description(description)
         if external_url is not None:
             self.set_external_url(external_url)
         if chart_url is not None:
             self.set_chart_url(chart_url)
         if custom_properties is not None:
             self.set_custom_properties(custom_properties)
+        if last_modified is not None:
+            self.set_last_modified(last_modified)
         if last_refreshed is not None:
             self.set_last_refreshed(last_refreshed)
         if chart_type is not None:
             self.set_chart_type(chart_type)
         if access is not None:
             self.set_access(access)
+        if input_datasets is not None:
+            self.set_input_datasets(input_datasets)
+
+        if parent_container is not unset:
+            self._set_container(parent_container)
         if subtype is not None:
             self.set_subtype(subtype)
         if owners is not None:
@@ -116,14 +132,6 @@ class Chart(
             self.set_terms(terms)
         if domain is not None:
             self.set_domain(domain)
-        if last_modified is not None:
-            self.set_last_modified(last_modified)
-        if input_datasets is not None:
-            self.set_input_datasets(input_datasets)
-        if description is not None:
-            self.set_description(description)
-        if display_name is not None:
-            self.set_display_name(display_name)
 
     @classmethod
     def _new_from_graph(cls, urn: Urn, current_aspects: models.AspectBag) -> Self:
@@ -139,11 +147,13 @@ class Chart(
         assert isinstance(self._urn, ChartUrn)
         return self._urn
 
-    def _ensure_chart_props(self) -> models.ChartInfoClass:
+    def _ensure_chart_props(
+        self, display_name: Optional[str] = None
+    ) -> models.ChartInfoClass:
         """Ensure chart properties exist, using a safer approach."""
         return self._setdefault_aspect(
             models.ChartInfoClass(
-                title=self.urn.chart_id,
+                title=display_name or self.urn.chart_id,
                 description="",
                 lastModified=models.ChangeAuditStampsClass(),
             )
@@ -155,13 +165,15 @@ class Chart(
         return self.urn.chart_id
 
     @property
+    @deprecated("Use display_name instead", version="1.2.0.7")
     def title(self) -> str:
-        """Get the title of the chart."""
-        return self._ensure_chart_props().title
+        """Get the display name of the chart."""
+        return self.display_name
 
+    @deprecated("Use set_display_name instead", version="1.2.0.7")
     def set_title(self, title: str) -> None:
-        """Set the title of the chart."""
-        self._ensure_chart_props().title = title
+        """Set the display name of the chart."""
+        self.set_display_name(title)
 
     @property
     def description(self) -> Optional[str]:
@@ -173,13 +185,13 @@ class Chart(
         self._ensure_chart_props().description = description
 
     @property
-    def display_name(self) -> Optional[str]:
+    def display_name(self) -> str:
         """Get the display name of the chart."""
-        return self.title
+        return self._ensure_chart_props().title
 
     def set_display_name(self, display_name: str) -> None:
         """Set the display name of the chart."""
-        self.set_title(display_name)
+        self._ensure_chart_props().title = display_name
 
     @property
     def external_url(self) -> Optional[str]:
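Together with the deprecation hunk above, this makes display_name the canonical accessor and keeps title / set_title as deprecated aliases. A minimal sketch of the resulting behavior, assuming a Chart instance chart constructed via the datahub.sdk Chart class:

chart.set_display_name("Monthly Revenue")  # preferred API
assert chart.display_name == "Monthly Revenue"

# Deprecated aliases still work but now warn, pointing at display_name:
chart.set_title("Monthly Revenue")
assert chart.title == "Monthly Revenue"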
@@ -250,9 +262,11 @@ class Chart(
     def set_chart_type(self, chart_type: Union[str, models.ChartTypeClass]) -> None:
         """Set the type of the chart."""
         if isinstance(chart_type, str):
-            assert chart_type in get_enum_options(models.ChartTypeClass), (
-                f"Invalid chart type: {chart_type}"
-            )
+            chart_type_options = get_enum_options(models.ChartTypeClass)
+            if chart_type not in chart_type_options:
+                raise ValueError(
+                    f"Invalid chart type: {chart_type}; valid types are {chart_type_options}"
+                )
         self._ensure_chart_props().type = chart_type
 
     @property