databricks-labs-lakebridge 0.10.1__py3-none-any.whl → 0.10.2__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
--- a/databricks/labs/lakebridge/__about__.py
+++ b/databricks/labs/lakebridge/__about__.py
@@ -1,2 +1,2 @@
  # DO NOT MODIFY THIS FILE
- __version__ = "0.10.1"
+ __version__ = "0.10.2"
--- a/databricks/labs/lakebridge/helpers/file_utils.py
+++ b/databricks/labs/lakebridge/helpers/file_utils.py
@@ -51,14 +51,3 @@ def get_sql_file(input_path: str | Path) -> Generator[Path, None, None]:
      for filename in files:
          if is_sql_file(filename):
              yield filename
-
-
- def read_file(filename: str | Path) -> str:
-     """
-     Reads the contents of the given file and returns it as a string.
-     :param filename: Input File Path
-     :return: File Contents as String
-     """
-     # pylint: disable=unspecified-encoding
-     with Path(filename).open() as file:
-         return file.read()
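
The removed `read_file` helper (together with the BOM stripping dropped from `string_utils.py` below) is superseded by `read_text` from `databricks.labs.blueprint.paths`, which callers now import directly. As a rough sketch of the behaviour being delegated, and assuming UTF-8 input, Python's built-in `utf-8-sig` codec already strips a leading BOM on read (this fallback helper is hypothetical, not part of either package):

    from pathlib import Path

    def read_file_fallback(filename: str | Path) -> str:
        # "utf-8-sig" decodes UTF-8 and silently drops a leading BOM,
        # approximating what read_file + remove_bom did for UTF-8 sources.
        return Path(filename).read_text(encoding="utf-8-sig")
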
--- a/databricks/labs/lakebridge/helpers/string_utils.py
+++ b/databricks/labs/lakebridge/helpers/string_utils.py
@@ -1,37 +1,3 @@
- import codecs
-
-
- # Optionally check to see if a string begins with a Byte Order Mark
- # such a character will cause the transpiler to fail
- def remove_bom(input_string: str) -> str:
-     """
-     Removes the Byte Order Mark (BOM) from the given string if it exists.
-     :param input_string: String to remove BOM from
-     :return: String without BOM
-     """
-     output_string = input_string
-
-     # Check and remove UTF-16 (LE and BE) BOM
-     if input_string.startswith(codecs.BOM_UTF16_BE.decode("utf-16-be")):
-         output_string = input_string[len(codecs.BOM_UTF16_BE.decode("utf-16-be")) :]
-     elif input_string.startswith(codecs.BOM_UTF16_LE.decode("utf-16-le")):
-         output_string = input_string[len(codecs.BOM_UTF16_LE.decode("utf-16-le")) :]
-     elif input_string.startswith(codecs.BOM_UTF16.decode("utf-16")):
-         output_string = input_string[len(codecs.BOM_UTF16.decode("utf-16")) :]
-     # Check and remove UTF-32 (LE and BE) BOM
-     elif input_string.startswith(codecs.BOM_UTF32_BE.decode("utf-32-be")):
-         output_string = input_string[len(codecs.BOM_UTF32_BE.decode("utf-32-be")) :]
-     elif input_string.startswith(codecs.BOM_UTF32_LE.decode("utf-32-le")):
-         output_string = input_string[len(codecs.BOM_UTF32_LE.decode("utf-32-le")) :]
-     elif input_string.startswith(codecs.BOM_UTF32.decode("utf-32")):
-         output_string = input_string[len(codecs.BOM_UTF32.decode("utf-32")) :]
-     # Check and remove UTF-8 BOM
-     elif input_string.startswith(codecs.BOM_UTF8.decode("utf-8")):
-         output_string = input_string[len(codecs.BOM_UTF8.decode("utf-8")) :]
-
-     return output_string
-
-
  def refactor_hexadecimal_chars(input_string: str) -> str:
      """
      Updates the HexaDecimal characters ( \x1b[\\d+m ) in the given string as below.
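
The deleted `remove_bom` decoded each BOM constant back to `str` before comparing. The same check is more compact at the byte level, with one subtlety worth preserving: the UTF-32 marks must be tested before the UTF-16 ones, because `codecs.BOM_UTF32_LE` (`FF FE 00 00`) begins with the same two bytes as `codecs.BOM_UTF16_LE` (`FF FE`). A standalone sketch, not the blueprint implementation:

    import codecs

    # Longest marks first: BOM_UTF32_LE begins with BOM_UTF16_LE,
    # so testing UTF-16 first would mis-strip UTF-32 input.
    _BOMS = (
        codecs.BOM_UTF32_LE,
        codecs.BOM_UTF32_BE,
        codecs.BOM_UTF16_LE,
        codecs.BOM_UTF16_BE,
        codecs.BOM_UTF8,
    )

    def strip_bom(raw: bytes) -> bytes:
        for bom in _BOMS:
            if raw.startswith(bom):
                return raw[len(bom):]
        return raw
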
--- a/databricks/labs/lakebridge/intermediate/root_tables.py
+++ b/databricks/labs/lakebridge/intermediate/root_tables.py
@@ -1,11 +1,9 @@
  import logging
  from pathlib import Path

- from databricks.labs.lakebridge.helpers.file_utils import (
-     get_sql_file,
-     is_sql_file,
-     read_file,
- )
+ from databricks.labs.blueprint.paths import read_text
+
+ from databricks.labs.lakebridge.helpers.file_utils import get_sql_file, is_sql_file
  from databricks.labs.lakebridge.intermediate.dag import DAG

  from databricks.labs.lakebridge.transpiler.sqlglot.sqlglot_engine import SqlglotEngine
@@ -26,14 +24,14 @@ class RootTableAnalyzer:
          # when input is sql file then parse the file
          if is_sql_file(self.input_path):
              logger.debug(f"Generating Lineage file: {self.input_path}")
-             sql_content = read_file(self.input_path)
+             sql_content = read_text(self.input_path)
              self._populate_dag(sql_content, self.input_path, dag)
              return dag  # return after processing the file

          # when the input is a directory
          for path in get_sql_file(self.input_path):
              logger.debug(f"Generating Lineage file: {path}")
-             sql_content = read_file(path)
+             sql_content = read_text(path)
              self._populate_dag(sql_content, path, dag)

          return dag
--- a/databricks/labs/lakebridge/transpiler/execute.py
+++ b/databricks/labs/lakebridge/transpiler/execute.py
@@ -9,6 +9,7 @@ from typing import cast
  import itertools

  from databricks.labs.blueprint.installation import JsonObject
+ from databricks.labs.blueprint.paths import read_text
  from databricks.labs.lakebridge.__about__ import __version__
  from databricks.labs.lakebridge.config import (
      TranspileConfig,
@@ -28,7 +29,6 @@ from databricks.labs.lakebridge.transpiler.transpile_status import (
      ErrorKind,
      ErrorSeverity,
  )
- from databricks.labs.lakebridge.helpers.string_utils import remove_bom
  from databricks.labs.lakebridge.helpers.validation import Validator
  from databricks.labs.lakebridge.transpiler.sqlglot.sqlglot_engine import SqlglotEngine
  from databricks.sdk import WorkspaceClient
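
In the `_process_one_file` hunk below, the rewritten code re-assigns the result of `dataclasses.replace` instead of mutating `context`: `replace` builds a new instance with only the named fields overridden, which also works for frozen dataclasses. A quick illustration with a stand-in class (names hypothetical):

    import dataclasses

    @dataclasses.dataclass(frozen=True)
    class Ctx:
        input_path: str
        source_code: str | None = None

    ctx = Ctx(input_path="query.sql")
    # replace() copies the frozen instance, overriding only source_code
    ctx = dataclasses.replace(ctx, source_code="SELECT 1")
    assert ctx.source_code == "SELECT 1"
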
@@ -62,15 +62,14 @@ async def _process_one_file(context: TranspilingContext) -> tuple[int, list[TranspileError]]:
          )
          return 0, [error]

-     with context.input_path.open("r") as f:
-         source_code = remove_bom(f.read())
-         context = dataclasses.replace(context, source_code=source_code)
+     source_code = read_text(context.input_path)
+     context = dataclasses.replace(context, source_code=source_code)

      transpile_result = await _transpile(
          context.transpiler,
          str(context.config.source_dialect),
          context.config.target_dialect,
-         str(context.source_code),
+         source_code,
          context.input_path,
      )

@@ -158,7 +157,9 @@ def _process_single_result(context: TranspilingContext, error_list: list[TranspileError]

      output_path = cast(Path, context.output_path)
      with output_path.open("w") as w:
-         w.write(make_header(context.input_path, error_list))
+         # The above adds a java-style comment block at the top of the output file
+         # This would break .py or .json outputs so we disable it for now.
+         # w.write(make_header(context.input_path, error_list))
          w.write(output_code)

      logger.info(f"Processed file: {context.input_path} (errors: {len(error_list)})")
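
If header emission is ever restored, one option suggested by the in-code comment is to gate it on the output type, since the Java-style `/* ... */` block that `make_header` produces is only valid in SQL. A hypothetical guard, not what this release ships:

    from pathlib import Path

    # Hypothetical: emit the header only where /* ... */ comments are legal.
    def should_emit_header(output_path: Path) -> bool:
        return output_path.suffix.lower() == ".sql"
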
--- a/databricks/labs/lakebridge/transpiler/lsp/lsp_engine.py
+++ b/databricks/labs/lakebridge/transpiler/lsp/lsp_engine.py
@@ -523,9 +523,7 @@ class LSPEngine(TranspileEngine):
          self.close_document(file_path)
          return ChangeManager.apply(source_code, response.changes, response.diagnostics, file_path)

-     def open_document(self, file_path: Path, encoding="utf-8", source_code: str | None = None) -> None:
-         if source_code is None:
-             source_code = file_path.read_text(encoding)
+     def open_document(self, file_path: Path, source_code: str) -> None:
          text_document = TextDocumentItem(
              uri=file_path.as_uri(), language_id=LanguageKind.Sql, version=1, text=source_code
          )
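
`open_document` no longer reads or decodes the file itself; the caller reads the source once (e.g. via `read_text`) and passes it in, so file I/O and BOM handling happen in exactly one place. A hypothetical call site under the new signature:

    from pathlib import Path

    from databricks.labs.blueprint.paths import read_text

    # 'engine' stands in for an initialized LSPEngine instance.
    file_path = Path("queries/example.sql")
    source_code = read_text(file_path)
    engine.open_document(file_path, source_code)
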
--- a/databricks_labs_lakebridge-0.10.1.dist-info/METADATA
+++ b/databricks_labs_lakebridge-0.10.2.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: databricks-labs-lakebridge
- Version: 0.10.1
+ Version: 0.10.2
  Summary: Fast and predictable migrations to Databricks Lakehouse Platform. This tool is designed to help you migrate your data and workloads to the Databricks Lakehouse Platform in a fast, predictable, and reliable way. It provides a set of tools and utilities to help you reconcile your data and workloads, assess your current state, and plan your migration.
  Project-URL: Documentation, https://databrickslabs.github.io/lakebridge
  Project-URL: Issues, https://github.com/databrickslabs/lakebridge/issues
@@ -25,8 +25,8 @@ Classifier: Topic :: Software Development :: Libraries
  Classifier: Topic :: Utilities
  Requires-Python: >=3.10
  Requires-Dist: cryptography<45.1.0,>=44.0.2
- Requires-Dist: databricks-bb-analyzer~=0.1.7
- Requires-Dist: databricks-labs-blueprint[yaml]<0.12.0,>=0.11.0
+ Requires-Dist: databricks-bb-analyzer~=0.1.8
+ Requires-Dist: databricks-labs-blueprint[yaml]<0.12.0,>=0.11.1
  Requires-Dist: databricks-labs-lsql==0.16.0
  Requires-Dist: databricks-sdk~=0.51.0
  Requires-Dist: duckdb~=1.2.2
--- a/databricks_labs_lakebridge-0.10.1.dist-info/RECORD
+++ b/databricks_labs_lakebridge-0.10.2.dist-info/RECORD
@@ -2,11 +2,12 @@ docs/lakebridge/src/components/Button.tsx,sha256=5l_irZl4AGwK7k1e2rdOb_W2-305Q1m
  docs/lakebridge/src/css/custom.css,sha256=-XnDdVlHqJZXJmKarH7zCUMnnlAfpxIpZyr8FNJ4q0A,4024
  docs/lakebridge/src/css/table.css,sha256=_MAyY7hyhfFrSNVAvCA2QlqdbeBi4Kr9Ue93bSyhKSE,315
  docs/lakebridge/src/pages/index.tsx,sha256=fQRA9ZbKsPxZbXuSa1LMDk1xfYg2YXCFgsgzqus0NLc,1789
+ docs/lakebridge/src/theme/DocSidebarItems/index.tsx,sha256=3FHRUOXJtjQk-caU_xAmmJWPC3E-H_ZZySV23tzqz3A,1334
  docs/lakebridge/src/theme/Footer/index.tsx,sha256=Jj8zY5WDiTLXwF_mAgld8Dh1A3MY1HFVVSYIoUs51so,1057
  docs/lakebridge/src/theme/Layout/index.tsx,sha256=IkdLr13jKmLxT0jWQqrwqrjVXc8Rwd_kWNpTd1t2sc0,592
  databricks/__init__.py,sha256=YqH8Hy8lHJxd0hLMZF6kWirUDdPiX90LRDX6S6yTMn0,261
  databricks/labs/__init__.py,sha256=YqH8Hy8lHJxd0hLMZF6kWirUDdPiX90LRDX6S6yTMn0,261
- databricks/labs/lakebridge/__about__.py,sha256=yME1bXdQJp7l83rLxzUY__9sQJos-JhiNDfB3KbW3WA,49
+ databricks/labs/lakebridge/__about__.py,sha256=1jZlIfQdn0z-MHgWnSw-KUImADj-pX_Qj-zRbJOsOb8,49
  databricks/labs/lakebridge/__init__.py,sha256=nUNECqNvyfpT0aeWwlqG0ADT8U8ScCLb8WWpLydppcA,464
  databricks/labs/lakebridge/base_install.py,sha256=8NxXsNpgqXnuADKXVFh5oQL3osdvygRMY1amJwKfU08,490
  databricks/labs/lakebridge/cli.py,sha256=BgN1pz4dtLB_Y0C16_JhcRVxIfU7srZk24tUBSLJPAs,20597
@@ -45,16 +46,16 @@ databricks/labs/lakebridge/errors/exceptions.py,sha256=PIj8wRJpxrBXOLMMt9HQhBfhZ
  databricks/labs/lakebridge/helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/labs/lakebridge/helpers/db_sql.py,sha256=chFHpn6XIuC0GrJ3a30_Y7tcXd4KZ5qO9zCAI4d7TR0,806
  databricks/labs/lakebridge/helpers/execution_time.py,sha256=8oLEYh0AKz1fuiQMyDTWDymhxh6xUKlcFpINWzKnOy4,533
- databricks/labs/lakebridge/helpers/file_utils.py,sha256=rzZ0IBu7dlPNRtzLnzOv4sZZd2FBPPQNdodbkb3PQEI,1991
+ databricks/labs/lakebridge/helpers/file_utils.py,sha256=0EISLVIleoKe0bzdvhd6RRWjbauX7eNB1eHSV9-2SIo,1676
  databricks/labs/lakebridge/helpers/metastore.py,sha256=1SKsIfNtiu3jUFjaXZ5B1fBZigVYqS1Q2OWhdn9qa8U,6425
  databricks/labs/lakebridge/helpers/recon_config_utils.py,sha256=1Nq_pIonE2tz08kdVpSDS-NVKGZ1p_kGRZBUQFFWZAs,7404
- databricks/labs/lakebridge/helpers/string_utils.py,sha256=_ovbMl7lXMKgYi1AF5UZQBcF1ZDs1PsSJV05erbEvg4,2786
+ databricks/labs/lakebridge/helpers/string_utils.py,sha256=TKW0BHmOZ2G8EebCohQRJLYglqeJajHgQ2BLehf9qsE,1169
  databricks/labs/lakebridge/helpers/telemetry_utils.py,sha256=M0lqYcLdLKROnnu3KRJUlGS368IRPIACef4G1ae9cvA,435
  databricks/labs/lakebridge/helpers/validation.py,sha256=97AoCcsliWKUBKAY8SwgL4Dad-r_W59L_7h2I3Pudmk,4449
  databricks/labs/lakebridge/intermediate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/labs/lakebridge/intermediate/dag.py,sha256=47bgyaYaBK_ELwLE5VGgFUraSxKdMJkLmo2lfc602lI,3165
  databricks/labs/lakebridge/intermediate/engine_adapter.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- databricks/labs/lakebridge/intermediate/root_tables.py,sha256=_3I0ks9qi9Al6HcURZ-gZ35IuP6dk7RUVuAOZHMcHp0,1497
+ databricks/labs/lakebridge/intermediate/root_tables.py,sha256=G9PFU22qJ0BgV1FGZPK5bWNdEa8Xpo_gyEvMmATHkTw,1524
  databricks/labs/lakebridge/reconcile/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/labs/lakebridge/reconcile/compare.py,sha256=P9ABIT95TeS7BVRYVbzjpaEsynF2h2m5M8f9-he8A3A,16136
  databricks/labs/lakebridge/reconcile/constants.py,sha256=TrWuQFgSoBhRZkpiuscewG5eaZKV-FsErUQt4JO0cxo,798
@@ -145,11 +146,11 @@ databricks/labs/lakebridge/resources/reconcile/queries/installation/details.sql,
  databricks/labs/lakebridge/resources/reconcile/queries/installation/main.sql,sha256=s_A0YyGSX_pCWnQsQnY65VYFcbNvq2qKJvYxU6zam6E,794
  databricks/labs/lakebridge/resources/reconcile/queries/installation/metrics.sql,sha256=FdvjQp7gCwsbcu4UrOuJN-bBLJFpvUIyxH6PQvg04Wo,1006
  databricks/labs/lakebridge/transpiler/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- databricks/labs/lakebridge/transpiler/execute.py,sha256=EOVAXHFzmfsS7cA5ogWKEUxGG6KokR-7PDZ8BdVwuz8,16871
+ databricks/labs/lakebridge/transpiler/execute.py,sha256=IUlz682ptMndolCI7IIGRTXJpJnGXHaEZXZnCLdCEDA,16956
  databricks/labs/lakebridge/transpiler/transpile_engine.py,sha256=9o-MXAnCChbFxv9Kg8kGLXdc8BZmtlwV5JdMPiuTQNk,1827
  databricks/labs/lakebridge/transpiler/transpile_status.py,sha256=MO-Ju-ki3FCY15WxgwfPV9EC7Ma9q8aIfSTgHAmnkGU,1715
  databricks/labs/lakebridge/transpiler/lsp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- databricks/labs/lakebridge/transpiler/lsp/lsp_engine.py,sha256=6H12u41I6HxXqF8SII6janxXkT4GG9VcrsDFeOmA7Cg,22325
+ databricks/labs/lakebridge/transpiler/lsp/lsp_engine.py,sha256=Whdb3Usi0_lq7i94c_D2XNXBTm-bfy3bso4nKIEq_qk,22205
  databricks/labs/lakebridge/transpiler/sqlglot/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/labs/lakebridge/transpiler/sqlglot/dialect_utils.py,sha256=GhXXWGA_2PlmHKjxrjryZpA5xaVZ81Vrw3b7DzjpFFI,1033
  databricks/labs/lakebridge/transpiler/sqlglot/lca_utils.py,sha256=vpDLGhE-wFMah1VTXkMg6gI_QnzdzpYZf0h9DUd8zcI,5154
@@ -163,9 +164,9 @@ databricks/labs/lakebridge/transpiler/sqlglot/parsers/presto.py,sha256=bY6Ku8ZPW
  databricks/labs/lakebridge/transpiler/sqlglot/parsers/snowflake.py,sha256=dZ7BdOlBZlkbiN9G9bu4l2c456265Gx9WoWUPRa7Ffg,23203
  databricks/labs/lakebridge/upgrades/v0.4.0_add_main_table_operation_name_column.py,sha256=wMTbj1q5td4fa5DCk0tWFJ-OmhhzsExRLYUe4PKmk0s,3527
  databricks/labs/lakebridge/upgrades/v0.6.0_alter_metrics_datatype.py,sha256=hnTHRtqzwPSF5Judzh6ss-uB5h3IFtm2ylWduwRNq5Y,2424
- databricks_labs_lakebridge-0.10.1.dist-info/METADATA,sha256=nGDYIxZ1D8NBXRz5ePlIUsulJHW-ljJ9QHdxhVZrqck,3078
- databricks_labs_lakebridge-0.10.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- databricks_labs_lakebridge-0.10.1.dist-info/entry_points.txt,sha256=Idr1CT73b8wShdr287yu1hheGbDbhBvucVUlZcbpiPo,75
- databricks_labs_lakebridge-0.10.1.dist-info/licenses/LICENSE,sha256=1hG0Cvw6mp9nL9qRoHFcCUk9fYqhcnj2vgJ75rt3BxA,3862
- databricks_labs_lakebridge-0.10.1.dist-info/licenses/NOTICE,sha256=wtxMsNvTkw1hAEkkWHz8A8JrYySAUSt1tOTcqddkWEg,1797
- databricks_labs_lakebridge-0.10.1.dist-info/RECORD,,
+ databricks_labs_lakebridge-0.10.2.dist-info/METADATA,sha256=vZS_ckQImF3YDBo5xH0S3KXPOPn78Stn689GBu1dR-Q,3078
+ databricks_labs_lakebridge-0.10.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ databricks_labs_lakebridge-0.10.2.dist-info/entry_points.txt,sha256=Idr1CT73b8wShdr287yu1hheGbDbhBvucVUlZcbpiPo,75
+ databricks_labs_lakebridge-0.10.2.dist-info/licenses/LICENSE,sha256=1hG0Cvw6mp9nL9qRoHFcCUk9fYqhcnj2vgJ75rt3BxA,3862
+ databricks_labs_lakebridge-0.10.2.dist-info/licenses/NOTICE,sha256=wtxMsNvTkw1hAEkkWHz8A8JrYySAUSt1tOTcqddkWEg,1797
+ databricks_labs_lakebridge-0.10.2.dist-info/RECORD,,
--- /dev/null
+++ b/docs/lakebridge/src/theme/DocSidebarItems/index.tsx
@@ -0,0 +1,42 @@
+ import React, {memo, type ReactNode} from 'react';
+ import {useLocation, useHistory} from 'react-router';
+ import type {PropSidebarItem} from '@docusaurus/plugin-content-docs';
+ import {
+   DocSidebarItemsExpandedStateProvider,
+   useVisibleSidebarItems,
+ } from '@docusaurus/plugin-content-docs/client';
+ import DocSidebarItem from '@theme/DocSidebarItem';
+
+ import type {Props} from '@theme/DocSidebarItems';
+
+ function DocSidebarItems({items, onItemClick, ...props}: Props): ReactNode {
+   const location = useLocation();
+   const history = useHistory();
+   const visibleItems = useVisibleSidebarItems(items, props.activePath);
+
+   /**
+    * Additional logic for handling custom UI scenarios
+    */
+   const onClickHandler = (params: PropSidebarItem) => {
+     if (onItemClick) {
+       onItemClick(params);
+     }
+
+     // show initial page on menu collapse
+     if (params.type === "category") {
+       if (location.pathname !== params.href && location.pathname.includes(params.href)) {
+         history.push(params.href);
+       }
+     }
+   }
+
+   return (
+     <DocSidebarItemsExpandedStateProvider>
+       {visibleItems.map((item, index) => (
+         <DocSidebarItem key={index} item={item} index={index} {...props} onItemClick={onClickHandler} />
+       ))}
+     </DocSidebarItemsExpandedStateProvider>
+   );
+ }
+
+ export default memo(DocSidebarItems);