databricks-labs-lakebridge 0.10.0__py3-none-any.whl → 0.10.1__py3-none-any.whl
This diff compares two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- databricks/labs/lakebridge/__about__.py +1 -1
- databricks/labs/lakebridge/base_install.py +2 -1
- databricks/labs/lakebridge/cli.py +30 -6
- databricks/labs/lakebridge/config.py +7 -7
- databricks/labs/lakebridge/deployment/recon.py +2 -1
- databricks/labs/lakebridge/install.py +46 -23
- databricks/labs/lakebridge/transpiler/execute.py +54 -41
- databricks/labs/lakebridge/uninstall.py +1 -1
- {databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/METADATA +4 -4
- {databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/RECORD +14 -14
- {databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/WHEEL +0 -0
- {databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/entry_points.txt +0 -0
- {databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/licenses/LICENSE +0 -0
- {databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/licenses/NOTICE +0 -0
databricks/labs/lakebridge/__about__.py
CHANGED
@@ -1,2 +1,2 @@
 # DO NOT MODIFY THIS FILE
-__version__ = "0.10.0"
+__version__ = "0.10.1"
databricks/labs/lakebridge/base_install.py
CHANGED
@@ -9,4 +9,5 @@ if __name__ == "__main__":
     logger = get_logger(__file__)
     logger.setLevel("INFO")

-    logger.info("Successfully Setup
+    logger.info("Successfully Setup Lakebridge Components Locally")
+    logger.info("For more information, please visit https://databrickslabs.github.io/lakebridge/")
databricks/labs/lakebridge/cli.py
CHANGED
@@ -1,6 +1,8 @@
 import asyncio
 import dataclasses
+import itertools
 import json
+import logging
 import os
 import time
 from pathlib import Path
@@ -11,7 +13,7 @@ from databricks.sdk.service.sql import CreateWarehouseRequestWarehouseType
 from databricks.sdk import WorkspaceClient

 from databricks.labs.blueprint.cli import App
-from databricks.labs.blueprint.entrypoint import get_logger
+from databricks.labs.blueprint.entrypoint import get_logger, is_in_debug
 from databricks.labs.blueprint.installation import JsonValue
 from databricks.labs.blueprint.tui import Prompts

@@ -40,6 +42,7 @@ from databricks.labs.lakebridge.transpiler.lsp.lsp_engine import LSPConfig
 from databricks.labs.lakebridge.transpiler.sqlglot.sqlglot_engine import SqlglotEngine
 from databricks.labs.lakebridge.transpiler.transpile_engine import TranspileEngine

+from databricks.labs.lakebridge.transpiler.transpile_status import ErrorSeverity

 lakebridge = App(__file__)
 logger = get_logger(__file__)
@@ -189,7 +192,7 @@ class _TranspileConfigChecker:
             transpiler_name = self._prompts.choice("Select the transpiler:", list(transpiler_names))
         else:
             transpiler_name = next(name for name in transpiler_names)
-        logger.info(f"
+        logger.info(f"Lakebridge will use the {transpiler_name} transpiler")
         transpiler_config_path = str(TranspilerInstaller.transpiler_config_path(transpiler_name))
         logger.debug(f"Setting transpiler_config_path to '{transpiler_config_path}'")
         self._config = dataclasses.replace(self._config, transpiler_config_path=cast(str, transpiler_config_path))
@@ -306,11 +309,29 @@ async def _transpile(ctx: ApplicationContext, config: TranspileConfig, engine: T
     logger.debug(f"User: {user}")
     _override_workspace_client_config(ctx, config.sdk_config)
     status, errors = await do_transpile(ctx.workspace_client, engine, config)
-
-
+
+    logger.debug(f"Transpilation completed with status: {status}")
+
+    for path, errors_by_path in itertools.groupby(errors, key=lambda x: x.path):
+        errs = list(errors_by_path)
+        errors_by_severity = {
+            severity.name: len(list(errors)) for severity, errors in itertools.groupby(errs, key=lambda x: x.severity)
+        }
+        reports = []
+        reported_severities = [ErrorSeverity.ERROR, ErrorSeverity.WARNING]
+        for severity in reported_severities:
+            if severity.name in errors_by_severity:
+                word = str.lower(severity.name) + "s" if errors_by_severity[severity.name] > 1 else ""
+                reports.append(f"{errors_by_severity[severity.name]} {word}")
+
+        msg = ", ".join(reports) + " found"
+
+        if ErrorSeverity.ERROR.name in errors_by_severity:
+            logger.error(f"{path}: {msg}")
+        elif ErrorSeverity.WARNING.name in errors_by_severity:
+            logger.warning(f"{path}: {msg}")

     # Table Template in labs.yml requires the status to be list of dicts Do not change this
-    logger.info(f"lakebridge Transpiler encountered {len(status)} from given {config.input_source} files.")
     return [status]

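A note on the summary loop above: `itertools.groupby` only merges *adjacent* items, so the per-file counts are complete only if `do_transpile` yields errors already ordered by path and then severity. A minimal sketch with hypothetical error records:

```python
# Minimal sketch of the grouping logic above, using made-up error records.
# itertools.groupby merges only adjacent items, so the input must already be
# in path-then-severity order for the counts to come out complete.
import itertools
from dataclasses import dataclass
from enum import Enum


class Severity(Enum):
    ERROR = 1
    WARNING = 2


@dataclass
class Err:
    path: str
    severity: Severity


errors = [
    Err("a.sql", Severity.ERROR),
    Err("a.sql", Severity.ERROR),
    Err("a.sql", Severity.WARNING),
    Err("b.sql", Severity.WARNING),
]

for path, errs in itertools.groupby(errors, key=lambda e: e.path):
    counts = {
        sev.name: len(list(group))
        for sev, group in itertools.groupby(errs, key=lambda e: e.severity)
    }
    print(path, counts)
# a.sql {'ERROR': 2, 'WARNING': 1}
# b.sql {'WARNING': 1}
```

Also worth noting: in the `word = ...` line, the conditional binds to the whole concatenation, so a file with exactly one error renders as `1  found` with an empty word rather than `1 error found`.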
@@ -442,8 +463,11 @@ def analyze(w: WorkspaceClient, source_directory: str, report_file: str):
     with_user_agent_extra("analyzer_source_tech", make_alphanum_or_semver(source_tech))
     user = ctx.current_user
     logger.debug(f"User: {user}")
-
+    is_debug = logger.getEffectiveLevel() == logging.DEBUG
+    Analyzer.analyze(Path(input_folder), Path(output_file), source_tech, is_debug=is_debug)


 if __name__ == "__main__":
     lakebridge()
+    if is_in_debug():
+        logger.setLevel(logging.DEBUG)
databricks/labs/lakebridge/config.py
CHANGED
@@ -4,7 +4,7 @@ import logging
 from dataclasses import dataclass
 from enum import Enum, auto
 from pathlib import Path
-from typing import Any, cast
+from typing import Any, Literal, cast

 from databricks.labs.blueprint.installation import JsonValue
 from databricks.labs.blueprint.tui import Prompts
@@ -89,30 +89,30 @@ class TranspileConfig:
         return Path(self.transpiler_config_path) if self.transpiler_config_path is not None else None

     @property
-    def input_path(self):
+    def input_path(self) -> Path:
         if self.input_source is None:
             raise ValueError("Missing input source!")
         return Path(self.input_source)

     @property
-    def output_path(self):
+    def output_path(self) -> Path | None:
         return None if self.output_folder is None else Path(self.output_folder)

     @property
-    def error_path(self):
+    def error_path(self) -> Path | None:
         return Path(self.error_file_path) if self.error_file_path else None

     @property
-    def target_dialect(self):
+    def target_dialect(self) -> Literal["databricks"]:
         return "databricks"

     @classmethod
-    def v1_migrate(cls, raw: dict) -> dict:
+    def v1_migrate(cls, raw: dict[str, Any]) -> dict[str, Any]:
         raw["version"] = 2
         return raw

     @classmethod
-    def v2_migrate(cls, raw: dict) -> dict:
+    def v2_migrate(cls, raw: dict[str, Any]) -> dict[str, Any]:
         del raw["mode"]
         key_mapping = {"input_sql": "input_source", "output_folder": "output_path", "source": "source_dialect"}
         raw["version"] = 3
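The `v1_migrate`/`v2_migrate` classmethods, now with precise `dict[str, Any]` signatures, rewrite older on-disk configs one schema version at a time. A rough sketch of how such a chain behaves; the key-renaming step is an assumption, since the hunk only shows `key_mapping` being defined:

```python
# Hedged sketch of chained, versioned config migrations. The renaming via
# key_mapping is assumed behavior; the hunk only shows the mapping's definition.
from typing import Any


def v1_migrate(raw: dict[str, Any]) -> dict[str, Any]:
    raw["version"] = 2
    return raw


def v2_migrate(raw: dict[str, Any]) -> dict[str, Any]:
    raw.pop("mode", None)
    key_mapping = {"input_sql": "input_source", "output_folder": "output_path", "source": "source_dialect"}
    # Assumed: rename legacy keys to their v3 names.
    migrated = {key_mapping.get(k, k): v for k, v in raw.items()}
    migrated["version"] = 3
    return migrated


raw = {"version": 1, "mode": "current", "input_sql": "queries/", "source": "snowflake"}
while raw["version"] < 3:
    raw = {1: v1_migrate, 2: v2_migrate}[raw["version"]](raw)
print(raw)  # {'version': 3, 'input_source': 'queries/', 'source_dialect': 'snowflake'}
```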
databricks/labs/lakebridge/deployment/recon.py
CHANGED
@@ -46,7 +46,8 @@ class ReconDeployment:
         logger.info("Installing reconcile components.")
         self._deploy_tables(recon_config)
         self._deploy_dashboards(recon_config)
-
+        # TODO INVESTIGATE: Why is this needed?
+        remorph_wheel_path = [whl for whl in wheel_paths if "lakebridge" in whl][0]
        self._deploy_jobs(recon_config, remorph_wheel_path)
         self._install_state.save()
         logger.info("Installation of reconcile components completed successfully.")
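A side note on the selection above: indexing with `[0]` raises a bare `IndexError` when no wheel path contains "lakebridge". A guarded variant, illustrative only (not what the package does):

```python
# Illustrative alternative to `[whl for whl in wheel_paths if ...][0]`:
# next(..., None) makes the missing-wheel case explicit instead of raising
# IndexError. The sample path is hypothetical.
wheel_paths = ["/tmp/wheels/databricks_labs_lakebridge-0.10.1-py3-none-any.whl"]

remorph_wheel_path = next((whl for whl in wheel_paths if "lakebridge" in whl), None)
if remorph_wheel_path is None:
    raise ValueError(f"no lakebridge wheel among {wheel_paths!r}")
print(remorph_wheel_path)
```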
databricks/labs/lakebridge/install.py
CHANGED
@@ -558,7 +558,7 @@ class WorkspaceInstaller:
     @classmethod
     def install_morpheus(cls, artifact: Path | None = None):
         java_version = cls.get_java_version()
-        if java_version is None or java_version < 11:
+        if java_version is None or java_version < (11, 0, 0, 0):
             logger.warning(
                 "This software requires Java 11 or above. Please install Java and re-run 'install-transpile'."
             )
@@ -582,25 +582,48 @@ class WorkspaceInstaller:
         logger.fatal(f"Cannot install unsupported artifact: {artifact}")

     @classmethod
-    def get_java_version(cls) -> int | None:
-
-
-
-
-        return None
-        result = completed.stderr.decode("utf-8")
-        start = result.find(" version ")
-        if start < 0:
+    def get_java_version(cls) -> tuple[int, int, int, int] | None:
+        # Platform-independent way to reliably locate the java executable.
+        # Reference: https://docs.python.org/3.10/library/subprocess.html#popen-constructor
+        java_executable = shutil.which("java")
+        if java_executable is None:
             return None
-
-
+        try:
+            completed = run([java_executable, "-version"], shell=False, capture_output=True, check=True)
+        except CalledProcessError as e:
+            logger.debug(
+                f"Failed to run {e.args!r} (exit-code={e.returncode}, stdout={e.stdout!r}, stderr={e.stderr!r})",
+                exc_info=e,
+            )
             return None
-
-
+        # It might not be ascii, but the bits we care about are so this will never fail.
+        java_version_output = completed.stderr.decode("ascii", errors="ignore")
+        java_version = cls._parse_java_version(java_version_output)
+        logger.debug(f"Detected java version: {java_version}")
+        return java_version
+
+    # Pattern to match a Java version string, compiled at import time to ensure it's valid.
+    # Ref: https://docs.oracle.com/en/java/javase/11/install/version-string-format.html
+    _java_version_pattern = re.compile(
+        r' version "(?P<feature>\d+)(?:\.(?P<interim>\d+)(?:\.(?P<update>\d+)(?:\.(?P<patch>\d+))?)?)?"'
+    )
+
+    @classmethod
+    def _parse_java_version(cls, version: str) -> tuple[int, int, int, int] | None:
+        """Locate and parse the Java version in the output of `java -version`."""
+        # Output looks like this:
+        # openjdk version "24.0.1" 2025-04-15
+        # OpenJDK Runtime Environment Temurin-24.0.1+9 (build 24.0.1+9)
+        # OpenJDK 64-Bit Server VM Temurin-24.0.1+9 (build 24.0.1+9, mixed mode)
+        match = cls._java_version_pattern.search(version)
+        if not match:
+            logger.debug(f"Could not parse java version: {version!r}")
             return None
-
-
-
+        feature = int(match["feature"])
+        interim = int(match["interim"] or 0)
+        update = int(match["update"] or 0)
+        patch = int(match["patch"] or 0)
+        return feature, interim, update, patch

     def configure(self, module: str) -> RemorphConfigs:
         match module:
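The rewritten `get_java_version` returns a 4-tuple instead of an int, so the `(11, 0, 0, 0)` check in `install_morpheus` compares element-wise. A standalone sketch of the same regex and comparison, applied to an illustrative `java -version` banner:

```python
# Standalone sketch of the parsing and comparison introduced above: the same
# named-group regex against a sample banner, and a tuple comparison like the
# one in install_morpheus. The banner text is illustrative.
import re

pattern = re.compile(
    r' version "(?P<feature>\d+)(?:\.(?P<interim>\d+)(?:\.(?P<update>\d+)(?:\.(?P<patch>\d+))?)?)?"'
)

banner = 'openjdk version "24.0.1" 2025-04-15'
match = pattern.search(banner)
assert match is not None
version = tuple(int(match[group] or 0) for group in ("feature", "interim", "update", "patch"))
print(version)                  # (24, 0, 1, 0)
print(version < (11, 0, 0, 0))  # False -- tuples compare element-wise, so Java 24 passes
```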
@@ -624,10 +647,10 @@ class WorkspaceInstaller:

     def _configure_transpile(self) -> TranspileConfig:
         try:
-            self._installation.load(TranspileConfig)
+            config = self._installation.load(TranspileConfig)
             logger.info("Lakebridge `transpile` is already installed on this workspace.")
             if not self._prompts.confirm("Do you want to override the existing installation?"):
-
+                return config
         except NotFound:
             logger.info("Couldn't find existing `transpile` installation")
         except (PermissionDenied, SerdeError, ValueError, AttributeError):
@@ -689,8 +712,7 @@ class WorkspaceInstaller:
             transpiler_name = None
         else:
             transpiler_name = next(t for t in transpilers)
-
-        logger.info(f"lakebridge will use the {transpiler_name} transpiler")
+        logger.info(f"Lakebridge will use the {transpiler_name} transpiler")
         if transpiler_name:
             transpiler_config_path = self._transpiler_config_path(transpiler_name)
             transpiler_options: dict[str, JsonValue] | None = None
@@ -749,10 +771,11 @@ class WorkspaceInstaller:
     def _configure_reconcile(self) -> ReconcileConfig:
         try:
             self._installation.load(ReconcileConfig)
-            logger.info("
+            logger.info("Lakebridge `reconcile` is already installed on this workspace.")
             if not self._prompts.confirm("Do you want to override the existing installation?"):
+                # TODO: Exit gracefully, without raising SystemExit
                 raise SystemExit(
-                    "
+                    "Lakebridge `reconcile` is already installed and no override has been requested. Exiting..."
                 )
         except NotFound:
             logger.info("Couldn't find existing `reconcile` installation")
databricks/labs/lakebridge/transpiler/execute.py
CHANGED
@@ -1,7 +1,7 @@
 import asyncio
 import dataclasses
 import logging
-import math
+from email import policy
 from email.message import Message
 from email.parser import Parser as EmailParser
 from pathlib import Path
@@ -98,7 +98,8 @@ def _is_combined_result(result: TranspileResult):

 def _process_combined_result(context: TranspilingContext, _error_list: list[TranspileError]) -> None:
     # TODO error handling
-
+    # Added policy to process quoted-printable encoded
+    parser = EmailParser(policy=policy.default)
     transpiled_code: str = cast(str, context.transpiled_code)
     message: Message = parser.parsestr(transpiled_code)
     for part in message.walk():
@@ -107,18 +108,27 @@ def _process_combined_result(context: TranspilingContext, _error_list: list[Tran

 def _process_combined_part(context: TranspilingContext, part: Message) -> None:
     if part.get_content_type() != "text/plain":
-        return
+        return  # TODO Need to handle other content types, e.g., text/binary, application/json, etc.
     filename = part.get_filename()
-
-
-
+    payload = part.get_payload(decode=True)
+    charset = part.get_content_charset() or "utf-8"
+    if isinstance(payload, bytes):
+        content = payload.decode(charset)
+    else:
+        content = str(payload)
+    logger.debug(f"Processing file: {filename}")
+
+    if not filename:
+        return  # TODO Raise exception!!!!
+    filename = Path(filename).name
     folder = context.output_folder
     segments = filename.split("/")
     for segment in segments[:-1]:
         folder = folder / segment
         folder.mkdir(parents=True, exist_ok=True)
     output = folder / segments[-1]
-
+    logger.debug(f"Writing output to: {output}")
+    output.write_text(content)
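For context on the `policy=policy.default` change: combined transpiler results are parsed as a multipart MIME document, and each `text/plain` part is decoded from its content-transfer-encoding via `get_payload(decode=True)`. A self-contained round trip with a hypothetical quoted-printable part (not the transpiler's actual output format):

```python
# Round-trip sketch of the multipart handling above. The attachment content
# and filename are hypothetical; the walk/decode mirrors _process_combined_part.
from email import policy
from email.message import EmailMessage
from email.parser import Parser

# Build a multipart document with one quoted-printable SQL attachment.
msg = EmailMessage()
msg.set_content("combined transpiler output")
msg.add_attachment(
    "SELECT 'café' AS c;\n",
    filename="out/query1.sql",
    cte="quoted-printable",
)

# Parse it back the way _process_combined_result does.
parsed = Parser(policy=policy.default).parsestr(msg.as_string())
for part in parsed.walk():
    if part.get_content_type() != "text/plain":
        continue  # skip the multipart/mixed container
    payload = part.get_payload(decode=True)  # bytes, transfer-encoding removed
    charset = part.get_content_charset() or "utf-8"
    print(part.get_filename(), repr(payload.decode(charset)))
```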
@@ -128,14 +138,6 @@ def _process_single_result(context: TranspilingContext, error_list: list[Transpi
     if any(err.kind == ErrorKind.PARSING for err in error_list):
         output_code = context.source_code or ""

-        if error_list:
-            with_line_numbers = ""
-            lines = output_code.split("\n")
-            line_number_width = math.floor(math.log(len(lines), 10)) + 1
-            for line_number, line in enumerate(lines, start=1):
-                with_line_numbers += f"/* {line_number:{line_number_width}d} */ {line}\n"
-            output_code = with_line_numbers
-
     elif context.validator:
         logger.debug(f"Validating transpiled code for file: {context.input_path}")
         validation_result = _validation(context.validator, context.config, str(context.transpiled_code))
@@ -156,38 +158,33 @@ def _process_single_result(context: TranspilingContext, error_list: list[Transpi

     output_path = cast(Path, context.output_path)
     with output_path.open("w") as w:
-        w.write(
+        w.write(make_header(context.input_path, error_list))
         w.write(output_code)

     logger.info(f"Processed file: {context.input_path} (errors: {len(error_list)})")


-def _make_header(file_path: Path, errors: list[TranspileError]) -> str:
+def make_header(file_path: Path, errors: list[TranspileError]) -> str:
     header = ""
     failed_producing_output = False
-    diag_by_severity = {
-
-
-
-        diag_by_severity[severity] = list(diags)
+    diag_by_severity = {
+        severity.name: list(diags) for severity, diags in itertools.groupby(errors, key=lambda x: x.severity)
+    }
+    line_numbers: dict[int, int] = {}

-    if ErrorSeverity.ERROR in diag_by_severity:
+    if ErrorSeverity.ERROR.name in diag_by_severity:
         header += f"/*\n Failed transpilation of {file_path}\n"
         header += "\n The following errors were found while transpiling:\n"
-
-
-
-
-            failed_producing_output = failed_producing_output or diag.kind == ErrorKind.PARSING
+        header += _append_diagnostics(diag_by_severity[ErrorSeverity.ERROR.name], line_numbers)
+        failed_producing_output = failed_producing_output or any(
+            x.kind == ErrorKind.PARSING for x in diag_by_severity[ErrorSeverity.ERROR.name]
+        )
     else:
         header += f"/*\n Successfully transpiled from {file_path}\n"

-    if ErrorSeverity.WARNING in diag_by_severity:
+    if ErrorSeverity.WARNING.name in diag_by_severity:
         header += "\n The following warnings were found while transpiling:\n"
-
-        if diag.range:
-            line_numbers[diag.range.start.line] = 0
-        header += _append_diagnostic(diag)
+        header += _append_diagnostics(diag_by_severity[ErrorSeverity.WARNING.name], line_numbers)

     if failed_producing_output:
         header += "\n\n Parsing errors prevented the converter from translating the input query.\n"
@@ -203,13 +200,29 @@ def _make_header(file_path: Path, errors: list[TranspileError]) -> str:
     return header.format(line_numbers=line_numbers)


-def
-
-
-
-
-
-
+def _append_diagnostics(diagnostics: list[TranspileError], line_numbers: dict) -> str:
+    header = ""
+    grouped_by_message = {msg: list(diags) for msg, diags in itertools.groupby(diagnostics, lambda x: x.message)}
+    for msg, occurrences in grouped_by_message.items():
+        for occurrence in occurrences:
+            if occurrence.range:
+                line_numbers.update({occurrence.range.start.line: 0})
+        header += _append_diagnostic(msg, occurrences)
+    return header
+
+
+def _append_diagnostic(msg: str, diags: list[TranspileError]) -> str:
+    positions = [
+        f"[{{line_numbers[{diag.range.start.line}]}}:{diag.range.start.character + 1}]" for diag in diags if diag.range
+    ]
+    message = msg.replace("{", "{{").replace("}", "}}").replace("/n", "\n ")
+    result = f" - {message}\n" if len(positions) != 1 else f" - {positions[0]} {message}\n"
+    if len(positions) > 1:
+        positions_str = ", ".join(positions)
+        result += f" Occurred {len(diags)} times at the following positions: {positions_str}\n"
+    elif len(diags) > 1:
+        result += f" Occurred {len(diags)} times\n"
+    return result


 async def _process_many_files(
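The header machinery is two-phase: `_append_diagnostic` emits `{line_numbers[...]}` placeholders (escaping literal braces in diagnostic messages as `{{`/`}}`), and `make_header` resolves them in a single `header.format(line_numbers=line_numbers)` pass once the final line offsets are known. A small sketch with made-up values:

```python
# Sketch of the deferred line-number substitution above: positions become
# format placeholders, user-facing braces are escaped, and one .format() call
# resolves everything later. All values here are hypothetical.
line_numbers = {12: 0, 40: 0}  # collected while appending diagnostics

header = " - [{line_numbers[12]}:5] unsupported expression\n"
message = "bad token: {json}".replace("{", "{{").replace("}", "}}")
header += f" - [{{line_numbers[40]}}:1] {message}\n"

# Once the real offsets are known, a single format() pass resolves the header:
line_numbers = {12: 15, 40: 43}
print(header.format(line_numbers=line_numbers), end="")
#  - [15:5] unsupported expression
#  - [43:1] bad token: {json}
```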
@@ -330,7 +343,7 @@ async def _do_transpile(
         msg = f"{config.input_source} does not exist."
         logger.error(msg)
         raise FileNotFoundError(msg)
-    logger.
+    logger.debug(f"Transpiler results: {result}")

     if not config.skip_validation:
         logger.info(f"SQL validation errors: {result.validation_error_count}")
{databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/METADATA
RENAMED
@@ -1,8 +1,8 @@
 Metadata-Version: 2.4
 Name: databricks-labs-lakebridge
-Version: 0.10.0
+Version: 0.10.1
 Summary: Fast and predictable migrations to Databricks Lakehouse Platform. This tool is designed to help you migrate your data and workloads to the Databricks Lakehouse Platform in a fast, predictable, and reliable way. It provides a set of tools and utilities to help you reconcile your data and workloads, assess your current state, and plan your migration.
-Project-URL: Documentation, https://github.
+Project-URL: Documentation, https://databrickslabs.github.io/lakebridge
 Project-URL: Issues, https://github.com/databrickslabs/lakebridge/issues
 Project-URL: Source, https://github.com/databrickslabs/lakebridge
 Maintainer-email: Databricks Labs <labs-oss@databricks.com>
@@ -25,7 +25,7 @@ Classifier: Topic :: Software Development :: Libraries
 Classifier: Topic :: Utilities
 Requires-Python: >=3.10
 Requires-Dist: cryptography<45.1.0,>=44.0.2
-Requires-Dist: databricks-bb-analyzer~=0.1.
+Requires-Dist: databricks-bb-analyzer~=0.1.7
 Requires-Dist: databricks-labs-blueprint[yaml]<0.12.0,>=0.11.0
 Requires-Dist: databricks-labs-lsql==0.16.0
 Requires-Dist: databricks-sdk~=0.51.0
@@ -42,7 +42,7 @@ Databricks Labs Lakebridge
 ![Databricks Labs Lakebridge White](/docs/lakebridge/static/img/lakebridge-lockup-white-background.svg)


-[![build](https://github.com/databrickslabs/remorph/actions/workflows/push.yml/badge.svg)](https://github.com/databrickslabs/remorph/actions/workflows/push.yml)
+[![build](https://github.com/databrickslabs/lakebridge/actions/workflows/push.yml/badge.svg)](https://github.com/databrickslabs/lakebridge/actions/workflows/push.yml)
 ![linesofcode](https://aschey.tech/tokei/github/databrickslabs/lakebridge?category=code)

 -----
{databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/RECORD
RENAMED
@@ -6,15 +6,15 @@ docs/lakebridge/src/theme/Footer/index.tsx,sha256=Jj8zY5WDiTLXwF_mAgld8Dh1A3MY1H
 docs/lakebridge/src/theme/Layout/index.tsx,sha256=IkdLr13jKmLxT0jWQqrwqrjVXc8Rwd_kWNpTd1t2sc0,592
 databricks/__init__.py,sha256=YqH8Hy8lHJxd0hLMZF6kWirUDdPiX90LRDX6S6yTMn0,261
 databricks/labs/__init__.py,sha256=YqH8Hy8lHJxd0hLMZF6kWirUDdPiX90LRDX6S6yTMn0,261
-databricks/labs/lakebridge/__about__.py,sha256=
+databricks/labs/lakebridge/__about__.py,sha256=yME1bXdQJp7l83rLxzUY__9sQJos-JhiNDfB3KbW3WA,49
 databricks/labs/lakebridge/__init__.py,sha256=nUNECqNvyfpT0aeWwlqG0ADT8U8ScCLb8WWpLydppcA,464
-databricks/labs/lakebridge/base_install.py,sha256=
-databricks/labs/lakebridge/cli.py,sha256=
-databricks/labs/lakebridge/config.py,sha256=
-databricks/labs/lakebridge/install.py,sha256=
+databricks/labs/lakebridge/base_install.py,sha256=8NxXsNpgqXnuADKXVFh5oQL3osdvygRMY1amJwKfU08,490
+databricks/labs/lakebridge/cli.py,sha256=BgN1pz4dtLB_Y0C16_JhcRVxIfU7srZk24tUBSLJPAs,20597
+databricks/labs/lakebridge/config.py,sha256=Kxl_Yzo5ooiFrt95Gp7AwyLlPZopa4MmQKpBfpHso2Y,5872
+databricks/labs/lakebridge/install.py,sha256=e7MdIWFHHr2-sp-NNe6inJ44s_jgL-b_g_Y4MJXtqNQ,38724
 databricks/labs/lakebridge/jvmproxy.py,sha256=F9pXpemzdaJXwpshHxVM9PYU_eNn4zTCUFQ5vc9WIhA,1573
 databricks/labs/lakebridge/lineage.py,sha256=Q2oky4RkODRHWMwIQIwbYXSdZTmRkMWwEh6RssBiQxY,1843
-databricks/labs/lakebridge/uninstall.py,sha256=
+databricks/labs/lakebridge/uninstall.py,sha256=hf36YgeW9XO2cRvvn6AXUZdihQ1ZMHnR38OVEF5sfRw,759
 databricks/labs/lakebridge/assessments/configure_assessment.py,sha256=r0VDjh4fAqZAHcLSN1qmyAQaTIwWRoXyitU7JCKsT3w,8412
 databricks/labs/lakebridge/assessments/pipeline.py,sha256=NlLyqy4ZkYdMX1ng4_5IWkNa5e06w8hzfwKQW3teWLQ,7728
 databricks/labs/lakebridge/assessments/profiler_config.py,sha256=-ZwOdN-kpX7jmXwPWG_QrTuKipreQa-8Jz7KrlR98Ws,682
@@ -35,7 +35,7 @@ databricks/labs/lakebridge/deployment/configurator.py,sha256=uOowc6cJfpZl4LGAs0e
 databricks/labs/lakebridge/deployment/dashboard.py,sha256=mFQFuw9YR8vcabYxCoZkgo247gttMaVokN8kYrJwz-Q,6002
 databricks/labs/lakebridge/deployment/installation.py,sha256=qGpvceRGi1AVST2NEtLWsneI83mKpXEZfTn812VdMjI,5072
 databricks/labs/lakebridge/deployment/job.py,sha256=J0zZao279P0qBH7idaREvcT-SjPsEGOL7mBgQ_ZTaZI,6090
-databricks/labs/lakebridge/deployment/recon.py,sha256=
+databricks/labs/lakebridge/deployment/recon.py,sha256=gRwuYE8GEg69rnYHv5IPOOwqdhZ2UPfwm0fOSFTL7pE,6258
 databricks/labs/lakebridge/deployment/table.py,sha256=15pKlGRtGDiErq2lY3V-vMbbiKHSF4U-U3S6WvHeOA0,976
 databricks/labs/lakebridge/deployment/upgrade_common.py,sha256=0WoMk_CUps5WfS_bm_U7zKGN6GMPcRWqdu4mgrPU0vs,4445
 databricks/labs/lakebridge/discovery/table.py,sha256=8ovHJotNbPQZ5hmcKG005qt178lZ5Y41oZ4IpEhjldw,808
@@ -145,7 +145,7 @@ databricks/labs/lakebridge/resources/reconcile/queries/installation/details.sql,
 databricks/labs/lakebridge/resources/reconcile/queries/installation/main.sql,sha256=s_A0YyGSX_pCWnQsQnY65VYFcbNvq2qKJvYxU6zam6E,794
 databricks/labs/lakebridge/resources/reconcile/queries/installation/metrics.sql,sha256=FdvjQp7gCwsbcu4UrOuJN-bBLJFpvUIyxH6PQvg04Wo,1006
 databricks/labs/lakebridge/transpiler/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-databricks/labs/lakebridge/transpiler/execute.py,sha256=
+databricks/labs/lakebridge/transpiler/execute.py,sha256=EOVAXHFzmfsS7cA5ogWKEUxGG6KokR-7PDZ8BdVwuz8,16871
 databricks/labs/lakebridge/transpiler/transpile_engine.py,sha256=9o-MXAnCChbFxv9Kg8kGLXdc8BZmtlwV5JdMPiuTQNk,1827
 databricks/labs/lakebridge/transpiler/transpile_status.py,sha256=MO-Ju-ki3FCY15WxgwfPV9EC7Ma9q8aIfSTgHAmnkGU,1715
 databricks/labs/lakebridge/transpiler/lsp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -163,9 +163,9 @@ databricks/labs/lakebridge/transpiler/sqlglot/parsers/presto.py,sha256=bY6Ku8ZPW
 databricks/labs/lakebridge/transpiler/sqlglot/parsers/snowflake.py,sha256=dZ7BdOlBZlkbiN9G9bu4l2c456265Gx9WoWUPRa7Ffg,23203
 databricks/labs/lakebridge/upgrades/v0.4.0_add_main_table_operation_name_column.py,sha256=wMTbj1q5td4fa5DCk0tWFJ-OmhhzsExRLYUe4PKmk0s,3527
 databricks/labs/lakebridge/upgrades/v0.6.0_alter_metrics_datatype.py,sha256=hnTHRtqzwPSF5Judzh6ss-uB5h3IFtm2ylWduwRNq5Y,2424
-databricks_labs_lakebridge-0.10.
-databricks_labs_lakebridge-0.10.
-databricks_labs_lakebridge-0.10.
-databricks_labs_lakebridge-0.10.
-databricks_labs_lakebridge-0.10.
-databricks_labs_lakebridge-0.10.
+databricks_labs_lakebridge-0.10.1.dist-info/METADATA,sha256=nGDYIxZ1D8NBXRz5ePlIUsulJHW-ljJ9QHdxhVZrqck,3078
+databricks_labs_lakebridge-0.10.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+databricks_labs_lakebridge-0.10.1.dist-info/entry_points.txt,sha256=Idr1CT73b8wShdr287yu1hheGbDbhBvucVUlZcbpiPo,75
+databricks_labs_lakebridge-0.10.1.dist-info/licenses/LICENSE,sha256=1hG0Cvw6mp9nL9qRoHFcCUk9fYqhcnj2vgJ75rt3BxA,3862
+databricks_labs_lakebridge-0.10.1.dist-info/licenses/NOTICE,sha256=wtxMsNvTkw1hAEkkWHz8A8JrYySAUSt1tOTcqddkWEg,1797
+databricks_labs_lakebridge-0.10.1.dist-info/RECORD,,
{databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/WHEEL
RENAMED
File without changes

{databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/entry_points.txt
RENAMED
File without changes

{databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/licenses/LICENSE
RENAMED
File without changes

{databricks_labs_lakebridge-0.10.0.dist-info → databricks_labs_lakebridge-0.10.1.dist-info}/licenses/NOTICE
RENAMED
File without changes