acryl-datahub 1.0.0.3rc9__py3-none-any.whl → 1.0.0.3rc10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of acryl-datahub might be problematic. Click here for more details.

Files changed (54)
  1. {acryl_datahub-1.0.0.3rc9.dist-info → acryl_datahub-1.0.0.3rc10.dist-info}/METADATA +2480 -2480
  2. {acryl_datahub-1.0.0.3rc9.dist-info → acryl_datahub-1.0.0.3rc10.dist-info}/RECORD +54 -54
  3. {acryl_datahub-1.0.0.3rc9.dist-info → acryl_datahub-1.0.0.3rc10.dist-info}/WHEEL +1 -1
  4. datahub/_version.py +1 -1
  5. datahub/api/circuit_breaker/operation_circuit_breaker.py +2 -2
  6. datahub/api/entities/datajob/dataflow.py +3 -3
  7. datahub/api/entities/forms/forms.py +34 -35
  8. datahub/api/graphql/assertion.py +1 -1
  9. datahub/api/graphql/operation.py +4 -4
  10. datahub/cli/delete_cli.py +1 -1
  11. datahub/cli/docker_cli.py +2 -2
  12. datahub/configuration/source_common.py +1 -1
  13. datahub/emitter/request_helper.py +116 -3
  14. datahub/emitter/rest_emitter.py +44 -52
  15. datahub/ingestion/api/source.py +2 -5
  16. datahub/ingestion/glossary/classification_mixin.py +4 -2
  17. datahub/ingestion/graph/client.py +3 -1
  18. datahub/ingestion/graph/config.py +1 -0
  19. datahub/ingestion/graph/filters.py +1 -1
  20. datahub/ingestion/source/aws/sagemaker_processors/feature_groups.py +1 -1
  21. datahub/ingestion/source/bigquery_v2/bigquery.py +24 -23
  22. datahub/ingestion/source/cassandra/cassandra_profiling.py +25 -24
  23. datahub/ingestion/source/datahub/datahub_database_reader.py +12 -11
  24. datahub/ingestion/source/dbt/dbt_cloud.py +2 -6
  25. datahub/ingestion/source/dbt/dbt_common.py +10 -2
  26. datahub/ingestion/source/dbt/dbt_core.py +82 -42
  27. datahub/ingestion/source/feast.py +4 -4
  28. datahub/ingestion/source/iceberg/iceberg_common.py +2 -2
  29. datahub/ingestion/source/ldap.py +1 -1
  30. datahub/ingestion/source/looker/looker_lib_wrapper.py +1 -1
  31. datahub/ingestion/source/looker/lookml_source.py +7 -1
  32. datahub/ingestion/source/mode.py +74 -28
  33. datahub/ingestion/source/neo4j/neo4j_source.py +85 -55
  34. datahub/ingestion/source/powerbi/config.py +1 -1
  35. datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py +2 -2
  36. datahub/ingestion/source/redshift/usage.py +10 -9
  37. datahub/ingestion/source/sql/clickhouse.py +5 -1
  38. datahub/ingestion/source/sql/druid.py +7 -2
  39. datahub/ingestion/source/sql/oracle.py +6 -2
  40. datahub/ingestion/source/tableau/tableau_validation.py +1 -1
  41. datahub/ingestion/source/usage/clickhouse_usage.py +7 -3
  42. datahub/ingestion/source/usage/starburst_trino_usage.py +5 -3
  43. datahub/metadata/{_schema_classes.py → _internal_schema_classes.py} +490 -490
  44. datahub/metadata/_urns/urn_defs.py +1786 -1786
  45. datahub/metadata/schema.avsc +17364 -16988
  46. datahub/metadata/schema_classes.py +3 -3
  47. datahub/metadata/schemas/__init__.py +3 -3
  48. datahub/testing/check_imports.py +1 -1
  49. datahub/utilities/logging_manager.py +8 -1
  50. datahub/utilities/sqlalchemy_query_combiner.py +4 -5
  51. datahub/utilities/urn_encoder.py +1 -1
  52. {acryl_datahub-1.0.0.3rc9.dist-info → acryl_datahub-1.0.0.3rc10.dist-info}/entry_points.txt +0 -0
  53. {acryl_datahub-1.0.0.3rc9.dist-info → acryl_datahub-1.0.0.3rc10.dist-info}/licenses/LICENSE +0 -0
  54. {acryl_datahub-1.0.0.3rc9.dist-info → acryl_datahub-1.0.0.3rc10.dist-info}/top_level.txt +0 -0
@@ -20,16 +20,16 @@ from datahub.utilities._custom_package_loader import get_custom_models_package
20
20
  _custom_package_path = get_custom_models_package()
21
21
 
22
22
  if TYPE_CHECKING or not _custom_package_path:
23
- from ._schema_classes import *
23
+ from ._internal_schema_classes import *
24
24
 
25
25
  # Required explicitly because __all__ doesn't include _ prefixed names.
26
- from ._schema_classes import __SCHEMA_TYPES
26
+ from ._internal_schema_classes import __SCHEMA_TYPES
27
27
 
28
28
  if IS_SPHINX_BUILD:
29
29
  # Set __module__ to the current module so that Sphinx will document the
30
30
  # classes as belonging to this module instead of the custom package.
31
31
  for _cls in list(globals().values()):
32
- if hasattr(_cls, "__module__") and "datahub.metadata._schema_classes" in _cls.__module__:
32
+ if hasattr(_cls, "__module__") and "datahub.metadata._internal_schema_classes" in _cls.__module__:
33
33
  _cls.__module__ = __name__
34
34
  else:
35
35
  _custom_package = importlib.import_module(_custom_package_path)
@@ -15,10 +15,10 @@ import pathlib
15
15
  def _load_schema(schema_name: str) -> str:
16
16
  return (pathlib.Path(__file__).parent / f"{schema_name}.avsc").read_text()
17
17
 
18
- def getMetadataChangeProposalSchema() -> str:
19
- return _load_schema("MetadataChangeProposal")
20
-
21
18
  def getMetadataChangeEventSchema() -> str:
22
19
  return _load_schema("MetadataChangeEvent")
23
20
 
21
+ def getMetadataChangeProposalSchema() -> str:
22
+ return _load_schema("MetadataChangeProposal")
23
+
24
24
  # fmt: on
@@ -9,7 +9,7 @@ def ensure_no_indirect_model_imports(dirs: List[pathlib.Path]) -> None:
9
9
  # If our needs become more complex, we should move to a proper linter.
10
10
  denied_imports = {
11
11
  "src.": "datahub.*",
12
- "datahub.metadata._schema_classes": "datahub.metadata.schema_classes",
12
+ "datahub.metadata._internal_schema_classes": "datahub.metadata.schema_classes",
13
13
  "datahub.metadata._urns": "datahub.metadata.urns",
14
14
  }
15
15
  ignored_files = {
@@ -161,6 +161,7 @@ class _LogBuffer:
161
161
  self._buffer: Deque[str] = collections.deque(maxlen=maxlen)
162
162
 
163
163
  def write(self, line: str) -> None:
164
+ # We do not expect `line` to have a trailing newline.
164
165
  if len(line) > IN_MEMORY_LOG_BUFFER_MAX_LINE_LENGTH:
165
166
  line = line[:IN_MEMORY_LOG_BUFFER_MAX_LINE_LENGTH] + "[truncated]"
166
167
 
@@ -188,7 +189,13 @@ class _BufferLogHandler(logging.Handler):
188
189
  message = self.format(record)
189
190
  except TypeError as e:
190
191
  message = f"Error formatting log message: {e}\nMessage: {record.msg}, Args: {record.args}"
191
- self._storage.write(message)
192
+
193
+ # For exception stack traces, the message is split over multiple lines,
194
+ # but we store it as a single string. Because we truncate based on line
195
+ # length, it's better for us to split it into multiple lines so that we
196
+ # don't lose any information on deeper stack traces.
197
+ for line in message.split("\n"):
198
+ self._storage.write(line)
192
199
 
193
200
 
194
201
  def _remove_all_handlers(logger: logging.Logger) -> None:
@@ -272,11 +272,10 @@ class SQLAlchemyQueryCombiner:
272
272
  self.report.uncombined_queries_issued += 1
273
273
  return _sa_execute_underlying_method(conn, query, *args, **kwargs)
274
274
 
275
- with _sa_execute_method_patching_lock:
276
- with unittest.mock.patch(
277
- "sqlalchemy.engine.Connection.execute", _sa_execute_fake
278
- ):
279
- yield self
275
+ with _sa_execute_method_patching_lock, unittest.mock.patch(
276
+ "sqlalchemy.engine.Connection.execute", _sa_execute_fake
277
+ ):
278
+ yield self
280
279
 
281
280
  def run(self, method: Callable[[], None]) -> None:
282
281
  """
@@ -4,7 +4,7 @@ from typing import List
4
4
  # NOTE: Frontend relies on encoding these three characters. Specifically, we decode and encode schema fields for column level lineage.
5
5
  # If this changes, make appropriate changes to datahub-web-react/src/app/lineage/utils/columnLineageUtils.ts
6
6
  # We also rely on encoding these exact three characters when generating schemaField urns in our graphQL layer. Update SchemaFieldUtils if this changes.
7
- # Also see https://datahubproject.io/docs/what/urn/#restrictions
7
+ # Also see https://docs.datahub.com/docs/what/urn/#restrictions
8
8
  RESERVED_CHARS = {",", "(", ")", "␟"}
9
9
  RESERVED_CHARS_EXTENDED = RESERVED_CHARS.union({"%"})
10
10