acryl-datahub 1.0.0.2rc4__py3-none-any.whl → 1.0.0.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release: this version of acryl-datahub might be problematic.

Files changed (159)
  1. {acryl_datahub-1.0.0.2rc4.dist-info → acryl_datahub-1.0.0.3.dist-info}/METADATA +2566 -2514
  2. {acryl_datahub-1.0.0.2rc4.dist-info → acryl_datahub-1.0.0.3.dist-info}/RECORD +159 -149
  3. {acryl_datahub-1.0.0.2rc4.dist-info → acryl_datahub-1.0.0.3.dist-info}/WHEEL +1 -1
  4. datahub/_version.py +1 -1
  5. datahub/api/circuit_breaker/operation_circuit_breaker.py +2 -2
  6. datahub/api/entities/datacontract/datacontract.py +35 -3
  7. datahub/api/entities/datajob/dataflow.py +3 -3
  8. datahub/api/entities/datajob/datajob.py +7 -4
  9. datahub/api/entities/dataset/dataset.py +9 -11
  10. datahub/api/entities/forms/forms.py +34 -34
  11. datahub/api/graphql/assertion.py +1 -1
  12. datahub/api/graphql/operation.py +4 -4
  13. datahub/cli/check_cli.py +3 -2
  14. datahub/cli/config_utils.py +2 -2
  15. datahub/cli/delete_cli.py +6 -5
  16. datahub/cli/docker_cli.py +2 -2
  17. datahub/cli/exists_cli.py +2 -1
  18. datahub/cli/get_cli.py +2 -1
  19. datahub/cli/iceberg_cli.py +6 -5
  20. datahub/cli/ingest_cli.py +9 -6
  21. datahub/cli/migrate.py +4 -3
  22. datahub/cli/migration_utils.py +4 -3
  23. datahub/cli/put_cli.py +3 -2
  24. datahub/cli/specific/assertions_cli.py +2 -1
  25. datahub/cli/specific/datacontract_cli.py +3 -2
  26. datahub/cli/specific/dataproduct_cli.py +10 -9
  27. datahub/cli/specific/dataset_cli.py +4 -3
  28. datahub/cli/specific/forms_cli.py +2 -1
  29. datahub/cli/specific/group_cli.py +2 -1
  30. datahub/cli/specific/structuredproperties_cli.py +4 -3
  31. datahub/cli/specific/user_cli.py +2 -1
  32. datahub/cli/state_cli.py +2 -1
  33. datahub/cli/timeline_cli.py +2 -1
  34. datahub/configuration/common.py +5 -0
  35. datahub/configuration/source_common.py +1 -1
  36. datahub/emitter/mcp.py +20 -5
  37. datahub/emitter/request_helper.py +116 -3
  38. datahub/emitter/rest_emitter.py +163 -93
  39. datahub/entrypoints.py +2 -1
  40. datahub/errors.py +4 -0
  41. datahub/ingestion/api/auto_work_units/auto_ensure_aspect_size.py +2 -1
  42. datahub/ingestion/api/source.py +2 -5
  43. datahub/ingestion/api/source_helpers.py +1 -0
  44. datahub/ingestion/glossary/classification_mixin.py +4 -2
  45. datahub/ingestion/graph/client.py +33 -8
  46. datahub/ingestion/graph/config.py +14 -0
  47. datahub/ingestion/graph/filters.py +1 -1
  48. datahub/ingestion/graph/links.py +53 -0
  49. datahub/ingestion/run/pipeline.py +9 -6
  50. datahub/ingestion/run/pipeline_config.py +1 -1
  51. datahub/ingestion/sink/datahub_rest.py +5 -6
  52. datahub/ingestion/source/apply/datahub_apply.py +2 -1
  53. datahub/ingestion/source/aws/sagemaker_processors/feature_groups.py +1 -1
  54. datahub/ingestion/source/bigquery_v2/bigquery.py +24 -23
  55. datahub/ingestion/source/bigquery_v2/bigquery_config.py +4 -62
  56. datahub/ingestion/source/bigquery_v2/bigquery_connection.py +70 -0
  57. datahub/ingestion/source/bigquery_v2/bigquery_queries.py +3 -1
  58. datahub/ingestion/source/cassandra/cassandra_profiling.py +25 -24
  59. datahub/ingestion/source/common/subtypes.py +3 -0
  60. datahub/ingestion/source/datahub/datahub_database_reader.py +12 -11
  61. datahub/ingestion/source/dbt/dbt_cloud.py +2 -6
  62. datahub/ingestion/source/dbt/dbt_common.py +10 -2
  63. datahub/ingestion/source/dbt/dbt_core.py +82 -42
  64. datahub/ingestion/source/dynamodb/dynamodb.py +7 -4
  65. datahub/ingestion/source/feast.py +4 -4
  66. datahub/ingestion/source/fivetran/config.py +1 -1
  67. datahub/ingestion/source/fivetran/fivetran_log_api.py +7 -3
  68. datahub/ingestion/source/fivetran/fivetran_query.py +16 -16
  69. datahub/ingestion/source/ge_data_profiler.py +27 -1
  70. datahub/ingestion/source/hex/api.py +1 -20
  71. datahub/ingestion/source/hex/query_fetcher.py +4 -1
  72. datahub/ingestion/source/iceberg/iceberg.py +20 -4
  73. datahub/ingestion/source/iceberg/iceberg_common.py +2 -2
  74. datahub/ingestion/source/ldap.py +1 -1
  75. datahub/ingestion/source/looker/looker_common.py +17 -2
  76. datahub/ingestion/source/looker/looker_lib_wrapper.py +1 -1
  77. datahub/ingestion/source/looker/looker_source.py +34 -5
  78. datahub/ingestion/source/looker/lookml_source.py +7 -1
  79. datahub/ingestion/source/metadata/lineage.py +2 -1
  80. datahub/ingestion/source/mlflow.py +19 -6
  81. datahub/ingestion/source/mode.py +74 -28
  82. datahub/ingestion/source/neo4j/neo4j_source.py +85 -55
  83. datahub/ingestion/source/powerbi/config.py +13 -1
  84. datahub/ingestion/source/powerbi/m_query/data_classes.py +1 -0
  85. datahub/ingestion/source/powerbi/m_query/odbc.py +185 -0
  86. datahub/ingestion/source/powerbi/m_query/pattern_handler.py +153 -0
  87. datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py +2 -2
  88. datahub/ingestion/source/redshift/usage.py +10 -9
  89. datahub/ingestion/source/sigma/config.py +74 -6
  90. datahub/ingestion/source/sigma/sigma.py +16 -1
  91. datahub/ingestion/source/sigma/sigma_api.py +99 -58
  92. datahub/ingestion/source/slack/slack.py +4 -52
  93. datahub/ingestion/source/snowflake/snowflake_config.py +2 -12
  94. datahub/ingestion/source/snowflake/snowflake_connection.py +24 -18
  95. datahub/ingestion/source/snowflake/snowflake_profiler.py +1 -6
  96. datahub/ingestion/source/snowflake/snowflake_queries.py +18 -4
  97. datahub/ingestion/source/snowflake/snowflake_query.py +9 -63
  98. datahub/ingestion/source/snowflake/snowflake_tag.py +4 -1
  99. datahub/ingestion/source/sql/athena.py +2 -1
  100. datahub/ingestion/source/sql/clickhouse.py +5 -1
  101. datahub/ingestion/source/sql/druid.py +7 -2
  102. datahub/ingestion/source/sql/hive.py +7 -2
  103. datahub/ingestion/source/sql/hive_metastore.py +5 -5
  104. datahub/ingestion/source/sql/mssql/source.py +1 -1
  105. datahub/ingestion/source/sql/oracle.py +6 -2
  106. datahub/ingestion/source/sql/sql_config.py +1 -34
  107. datahub/ingestion/source/sql/sqlalchemy_uri.py +36 -0
  108. datahub/ingestion/source/sql/stored_procedures/base.py +12 -1
  109. datahub/ingestion/source/sql/two_tier_sql_source.py +1 -1
  110. datahub/ingestion/source/state_provider/datahub_ingestion_checkpointing_provider.py +2 -1
  111. datahub/ingestion/source/tableau/tableau.py +31 -6
  112. datahub/ingestion/source/tableau/tableau_validation.py +1 -1
  113. datahub/ingestion/source/unity/config.py +2 -1
  114. datahub/ingestion/source/usage/clickhouse_usage.py +7 -3
  115. datahub/ingestion/source/usage/starburst_trino_usage.py +5 -3
  116. datahub/ingestion/source/vertexai/vertexai.py +316 -4
  117. datahub/ingestion/source/vertexai/vertexai_result_type_utils.py +23 -2
  118. datahub/integrations/assertion/common.py +3 -2
  119. datahub/metadata/{_schema_classes.py → _internal_schema_classes.py} +538 -493
  120. datahub/metadata/_urns/urn_defs.py +1819 -1763
  121. datahub/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py +2 -0
  122. datahub/metadata/schema.avsc +17296 -16883
  123. datahub/metadata/schema_classes.py +3 -3
  124. datahub/metadata/schemas/DataContractKey.avsc +2 -1
  125. datahub/metadata/schemas/DataHubOpenAPISchemaKey.avsc +22 -0
  126. datahub/metadata/schemas/DataTransformLogic.avsc +4 -2
  127. datahub/metadata/schemas/FormInfo.avsc +5 -0
  128. datahub/metadata/schemas/MLModelDeploymentProperties.avsc +3 -0
  129. datahub/metadata/schemas/MetadataChangeEvent.avsc +6 -0
  130. datahub/metadata/schemas/MetadataChangeLog.avsc +3 -0
  131. datahub/metadata/schemas/MetadataChangeProposal.avsc +3 -0
  132. datahub/metadata/schemas/QueryProperties.avsc +4 -2
  133. datahub/metadata/schemas/SystemMetadata.avsc +86 -0
  134. datahub/metadata/schemas/__init__.py +3 -3
  135. datahub/sdk/_all_entities.py +4 -0
  136. datahub/sdk/_shared.py +142 -4
  137. datahub/sdk/_utils.py +4 -0
  138. datahub/sdk/dataset.py +2 -2
  139. datahub/sdk/entity_client.py +8 -0
  140. datahub/sdk/lineage_client.py +235 -0
  141. datahub/sdk/main_client.py +6 -3
  142. datahub/sdk/mlmodel.py +301 -0
  143. datahub/sdk/mlmodelgroup.py +233 -0
  144. datahub/secret/datahub_secret_store.py +2 -1
  145. datahub/specific/dataset.py +12 -0
  146. datahub/sql_parsing/fingerprint_utils.py +6 -0
  147. datahub/sql_parsing/sql_parsing_aggregator.py +48 -34
  148. datahub/sql_parsing/sqlglot_utils.py +18 -14
  149. datahub/telemetry/telemetry.py +2 -2
  150. datahub/testing/check_imports.py +1 -1
  151. datahub/testing/mcp_diff.py +15 -2
  152. datahub/upgrade/upgrade.py +10 -12
  153. datahub/utilities/logging_manager.py +8 -1
  154. datahub/utilities/server_config_util.py +350 -10
  155. datahub/utilities/sqlalchemy_query_combiner.py +4 -5
  156. datahub/utilities/urn_encoder.py +1 -1
  157. {acryl_datahub-1.0.0.2rc4.dist-info → acryl_datahub-1.0.0.3.dist-info}/entry_points.txt +0 -0
  158. {acryl_datahub-1.0.0.2rc4.dist-info → acryl_datahub-1.0.0.3.dist-info}/licenses/LICENSE +0 -0
  159. {acryl_datahub-1.0.0.2rc4.dist-info → acryl_datahub-1.0.0.3.dist-info}/top_level.txt +0 -0
datahub/utilities/server_config_util.py
@@ -1,23 +1,363 @@
- from typing import Any, Dict, Optional
+ import logging
+ import re
+ from dataclasses import dataclass, field
+ from enum import Enum
+ from typing import (
+     Any,
+     Dict,
+     Optional,
+     Tuple,
+     Union,
+ )

+ import requests
+
+ from datahub.configuration.common import (
+     ConfigurationError,
+ )
  from datahub.telemetry.telemetry import suppress_telemetry

+ logger = logging.getLogger(__name__)
+
  # Only to be written to for logging server related information
  global_debug: Dict[str, Any] = {}


- def set_gms_config(config: Dict) -> Any:
+ def get_gms_config() -> Dict:
+     return global_debug.get("gms_config", {})
+
+
+ class ServiceFeature(Enum):
+     """
+     Enum representing supported features in the REST service.
+     """
+
+     OPEN_API_SDK = "openapi_sdk"
+     API_TRACING = "api_tracing"
+     NO_CODE = "no_code"
+     STATEFUL_INGESTION = "stateful_ingestion"
+     IMPACT_ANALYSIS = "impact_analysis"
+     PATCH_CAPABLE = "patch_capable"
+     CLI_TELEMETRY = "cli_telemetry"
+     DATAHUB_CLOUD = "datahub_cloud"
+     # Add more features as needed
+
+
+ _REQUIRED_VERSION_OPENAPI_TRACING = {
+     "cloud": (0, 3, 11, 0),
+     "core": (1, 0, 1, 0),
+ }
+
+
+ @dataclass
+ class RestServiceConfig:
+     """
+     A class to represent REST service configuration with semantic version parsing capabilities.
+     """
+
+     session: Optional[requests.Session] = None
+     url: Optional[str] = None
+     raw_config: Dict[str, Any] = field(default_factory=dict)
+     _version_cache: Optional[Tuple[int, int, int, int]] = None
+
+     def fetch_config(self) -> Dict[str, Any]:
+         """
+         Fetch configuration from the server if not already loaded.
+
+         Returns:
+             The configuration dictionary
+
+         Raises:
+             ConfigurationError: If there's an error fetching or validating the configuration
+         """
+         if not self.raw_config:
+             if self.session is None or self.url is None:
+                 raise ConfigurationError(
+                     "Session and URL are required to load configuration"
+                 )
+
+             response = self.session.get(self.url)
+
+             if response.status_code == 200:
+                 config = response.json()
+
+                 # Validate that we're connected to the correct service
+                 if config.get("noCode") == "true":
+                     self.raw_config = config
+                 else:
+                     raise ConfigurationError(
+                         "You seem to have connected to the frontend service instead of the GMS endpoint. "
+                         "The rest emitter should connect to DataHub GMS (usually <datahub-gms-host>:8080) or Frontend GMS API (usually <frontend>:9002/api/gms). "
+                         "For Acryl users, the endpoint should be https://<name>.acryl.io/gms"
+                     )
+             else:
+                 logger.debug(
+                     f"Unable to connect to {self.url} with status_code: {response.status_code}. Response: {response.text}"
+                 )
+
+                 if response.status_code == 401:
+                     message = f"Unable to connect to {self.url} - got an authentication error: {response.text}."
+                 else:
+                     message = f"Unable to connect to {self.url} with status_code: {response.status_code}."
+
+                 message += "\nPlease check your configuration and make sure you are talking to the DataHub GMS (usually <datahub-gms-host>:8080) or Frontend GMS API (usually <frontend>:9002/api/gms)."
+                 raise ConfigurationError(message)
+
+         return self.raw_config
+
+     @property
+     def config(self) -> Dict[str, Any]:
+         """
+         Get the full configuration dictionary, loading it if necessary.
+
+         Returns:
+             The configuration dictionary
+         """
+         return self.fetch_config()
+
+     @property
+     def commit_hash(self) -> Optional[str]:
+         """
+         Get the commit hash for the current version.
+
+         Returns:
+             The commit hash or None if not found
+         """
+         versions = self.config.get("versions") or {}
+         datahub_info = versions.get("acryldata/datahub") or {}
+         return datahub_info.get("commit")
+
+     @property
+     def server_type(self) -> str:
+         """
+         Get the server type.
+
+         Returns:
+             The server type or "unknown" if not found
+         """
+         datahub = self.config.get("datahub") or {}
+         return datahub.get("serverType", "unknown")
+
+     @property
+     def service_version(self) -> Optional[str]:
+         """
+         Get the raw service version string.
+
+         Returns:
+             The version string or None if not found
+         """
+         config = self.fetch_config()
+         versions = config.get("versions") or {}
+         datahub_info = versions.get("acryldata/datahub") or {}
+         return datahub_info.get("version")
+
+     def _parse_version(
+         self, version_str: Optional[str] = None
+     ) -> Tuple[int, int, int, int]:
+         """
+         Parse a semantic version string into its components, ignoring rc and suffixes.
+         Supports standard three-part versions (1.0.0) and four-part versions (1.0.0.1).
+
+         Args:
+             version_str: Version string to parse. If None, uses the service version.
+
+         Returns:
+             Tuple of (major, minor, patch, build) version numbers where build is 0 for three-part versions
+
+         Raises:
+             ValueError: If the version string cannot be parsed
+         """
+         if version_str is None:
+             version_str = self.service_version
+
+         if not version_str:
+             return (0, 0, 0, 0)
+
+         # Remove 'v' prefix if present
+         if version_str.startswith("v"):
+             version_str = version_str[1:]
+
+         # Extract the semantic version part (before any rc or suffix)
+         # This pattern will match both three-part (1.0.0) and four-part (1.0.0.1) versions
+         match = re.match(r"(\d+)\.(\d+)\.(\d+)(?:\.(\d+))?(?:rc\d+|-.*)?", version_str)
+         if not match:
+             raise ValueError(f"Invalid version format: {version_str}")
+
+         major = int(match.group(1))
+         minor = int(match.group(2))
+         patch = int(match.group(3))
+         build = (
+             int(match.group(4)) if match.group(4) else 0
+         )  # Default to 0 if not present
+
+         return (major, minor, patch, build)
+
+     @property
+     def parsed_version(self) -> Optional[Tuple[int, int, int, int]]:
+         """
+         Get the parsed semantic version of the service.
+         Uses caching for efficiency.
+
+         Returns:
+             Tuple of (major, minor, patch) version numbers
+         """
+         if self._version_cache is None:
+             self._version_cache = self._parse_version()
+         return self._version_cache
+
+     def is_version_at_least(
+         self, major: int, minor: int = 0, patch: int = 0, build: int = 0
+     ) -> bool:
+         """
+         Check if the service version is at least the specified version.
+
+         Args:
+             major: Major version to check against
+             minor: Minor version to check against
+             patch: Patch version to check against
+             build: Build version to check against (for four-part versions)
+
+         Returns:
+             True if the service version is at least the specified version
+         """
+         current_version = self.parsed_version or (0, 0, 0, 0)
+         requested_version = (major, minor, patch, build)
+
+         return current_version >= requested_version
+
+     @property
+     def is_no_code_enabled(self) -> bool:
+         """
+         Check if noCode is enabled.
+
+         Returns:
+             True if noCode is set to "true"
+         """
+         return self.config.get("noCode") == "true"
+
+     @property
+     def is_managed_ingestion_enabled(self) -> bool:
+         """
+         Check if managedIngestion is enabled.
+
+         Returns:
+             True if managedIngestion.enabled is True
+         """
+         managed_ingestion = self.config.get("managedIngestion") or {}
+         return managed_ingestion.get("enabled", False)
+
+     @property
+     def is_datahub_cloud(self) -> bool:
+         """
+         Check if DataHub Cloud is enabled.
+
+         Returns:
+             True if the server environment is not 'core'
+         """
+         datahub_config = self.config.get("datahub") or {}
+         server_env = datahub_config.get("serverEnv")
+
+         # Return False if serverEnv is None or empty string
+         if not server_env:
+             return False
+
+         return server_env != "core"
+
+     def supports_feature(self, feature: ServiceFeature) -> bool:
+         """
+         Determines whether a specific feature is supported based on service version
+         and whether this is a cloud deployment or not.
+
+         Args:
+             feature: Feature enum value to check
+
+         Returns:
+             Boolean indicating whether the feature is supported
+         """
+         # Special handling for features that rely on config flags
+         config_based_features = {
+             ServiceFeature.NO_CODE: lambda: self.is_no_code_enabled,
+             ServiceFeature.STATEFUL_INGESTION: lambda: self.config.get(
+                 "statefulIngestionCapable", False
+             )
+             is True,
+             ServiceFeature.IMPACT_ANALYSIS: lambda: self.config.get(
+                 "supportsImpactAnalysis", False
+             )
+             is True,
+             ServiceFeature.PATCH_CAPABLE: lambda: self.config.get("patchCapable", False)
+             is True,
+             ServiceFeature.CLI_TELEMETRY: lambda: (
+                 self.config.get("telemetry") or {}
+             ).get("enabledCli", None),
+             ServiceFeature.DATAHUB_CLOUD: lambda: self.is_datahub_cloud,
+         }
+
+         # Check if this is a config-based feature
+         if feature in config_based_features:
+             return config_based_features[feature]()
+
+         # For environment-based features, determine requirements based on cloud vs. non-cloud
+         deployment_type = "cloud" if self.is_datahub_cloud else "core"
+
+         # Define feature requirements
+         feature_requirements = {
+             ServiceFeature.OPEN_API_SDK: _REQUIRED_VERSION_OPENAPI_TRACING,
+             ServiceFeature.API_TRACING: _REQUIRED_VERSION_OPENAPI_TRACING,
+             # Additional features can be defined here
+         }
+
+         # Check if the feature exists in our requirements dictionary
+         if feature not in feature_requirements:
+             # Unknown feature, assume not supported
+             return False
+
+         # Get version requirements for this feature and deployment type
+         feature_reqs = feature_requirements[feature]
+         requirements = feature_reqs.get(deployment_type)
+
+         if not requirements:
+             # If no specific requirements defined for this deployment type,
+             # assume feature is not supported
+             return False
+
+         # Check if the current version meets the requirements
+         req_major, req_minor, req_patch, req_build = requirements
+         return self.is_version_at_least(req_major, req_minor, req_patch, req_build)
+
+     def __str__(self) -> str:
+         """
+         Return a string representation of the configuration as JSON.
+
+         Returns:
+             A string representation of the configuration dictionary
+         """
+         return str(self.config)
+
+     def __repr__(self) -> str:
+         """
+         Return a representation of the object that can be used to recreate it.
+
+         Returns:
+             A string representation that can be used with pprint
+         """
+         return str(self.config)
+
+
+ def set_gms_config(config: Union[Dict[str, Any], RestServiceConfig]) -> None:
      global_debug["gms_config"] = config

-     cli_telemtry_enabled = is_cli_telemetry_enabled()
-     if cli_telemtry_enabled is not None and not cli_telemtry_enabled:
+     config_obj = (
+         config
+         if isinstance(config, RestServiceConfig)
+         else RestServiceConfig(raw_config=config)
+     )
+
+     cli_telemetry_enabled = is_cli_telemetry_enabled(config_obj)
+     if cli_telemetry_enabled is not None and not cli_telemetry_enabled:
          # server requires telemetry to be disabled on client
          suppress_telemetry()


- def get_gms_config() -> Dict:
-     return global_debug.get("gms_config", {})
-
-
- def is_cli_telemetry_enabled() -> Optional[bool]:
-     return get_gms_config().get("telemetry", {}).get("enabledCli", None)
+ def is_cli_telemetry_enabled(config: RestServiceConfig) -> bool:
+     return config.supports_feature(ServiceFeature.CLI_TELEMETRY)
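
For orientation, here is a minimal usage sketch of the RestServiceConfig and ServiceFeature API added above. The payload values are illustrative, not taken from a real server; the keys mirror the ones the class reads (noCode, datahub, versions, telemetry).

    from datahub.utilities.server_config_util import (
        RestServiceConfig,
        ServiceFeature,
        set_gms_config,
    )

    # Illustrative /config payload shaped like the fields read in the diff above.
    raw = {
        "noCode": "true",
        "datahub": {"serverType": "quickstart", "serverEnv": "core"},
        "versions": {"acryldata/datahub": {"version": "v1.0.1rc2", "commit": "abc123"}},
        "telemetry": {"enabledCli": True},
    }

    cfg = RestServiceConfig(raw_config=raw)
    assert cfg.parsed_version == (1, 0, 1, 0)                    # the "rc2" suffix is ignored
    assert cfg.is_version_at_least(1, 0, 1)                      # plain tuple comparison
    assert not cfg.is_datahub_cloud                              # serverEnv == "core"
    assert cfg.supports_feature(ServiceFeature.API_TRACING)      # core requires >= 1.0.1.0
    assert cfg.supports_feature(ServiceFeature.CLI_TELEMETRY)    # reads telemetry.enabledCli

    # set_gms_config now accepts either a raw dict or a RestServiceConfig instance.
    set_gms_config(cfg)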
datahub/utilities/sqlalchemy_query_combiner.py
@@ -272,11 +272,10 @@ class SQLAlchemyQueryCombiner:
              self.report.uncombined_queries_issued += 1
              return _sa_execute_underlying_method(conn, query, *args, **kwargs)

-         with _sa_execute_method_patching_lock:
-             with unittest.mock.patch(
-                 "sqlalchemy.engine.Connection.execute", _sa_execute_fake
-             ):
-                 yield self
+         with _sa_execute_method_patching_lock, unittest.mock.patch(
+             "sqlalchemy.engine.Connection.execute", _sa_execute_fake
+         ):
+             yield self

      def run(self, method: Callable[[], None]) -> None:
          """
datahub/utilities/urn_encoder.py
@@ -4,7 +4,7 @@ from typing import List
  # NOTE: Frontend relies on encoding these three characters. Specifically, we decode and encode schema fields for column level lineage.
  # If this changes, make appropriate changes to datahub-web-react/src/app/lineage/utils/columnLineageUtils.ts
  # We also rely on encoding these exact three characters when generating schemaField urns in our graphQL layer. Update SchemaFieldUtils if this changes.
- # Also see https://datahubproject.io/docs/what/urn/#restrictions
+ # Also see https://docs.datahub.com/docs/what/urn/#restrictions
  RESERVED_CHARS = {",", "(", ")", "␟"}
  RESERVED_CHARS_EXTENDED = RESERVED_CHARS.union({"%"})