databricks-labs-lakebridge 0.10.4__py3-none-any.whl → 0.10.6__py3-none-any.whl

This diff compares the contents of two package versions that were publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
--- a/databricks/labs/lakebridge/__about__.py
+++ b/databricks/labs/lakebridge/__about__.py
@@ -1,2 +1,2 @@
  # DO NOT MODIFY THIS FILE
- __version__ = "0.10.4"
+ __version__ = "0.10.6"
--- a/databricks/labs/lakebridge/cli.py
+++ b/databricks/labs/lakebridge/cli.py
@@ -33,14 +33,12 @@ from databricks.labs.lakebridge.contexts.application import ApplicationContext
  from databricks.labs.lakebridge.helpers.recon_config_utils import ReconConfigPrompts
  from databricks.labs.lakebridge.helpers.telemetry_utils import make_alphanum_or_semver
  from databricks.labs.lakebridge.install import WorkspaceInstaller
- from databricks.labs.lakebridge.install import TranspilerInstaller
  from databricks.labs.lakebridge.reconcile.runner import ReconcileRunner
  from databricks.labs.lakebridge.lineage import lineage_generator
  from databricks.labs.lakebridge.reconcile.recon_config import RECONCILE_OPERATION_NAME, AGG_RECONCILE_OPERATION_NAME
  from databricks.labs.lakebridge.transpiler.execute import transpile as do_transpile
-
-
  from databricks.labs.lakebridge.transpiler.lsp.lsp_engine import LSPEngine
+ from databricks.labs.lakebridge.transpiler.repository import TranspilerRepository
  from databricks.labs.lakebridge.transpiler.sqlglot.sqlglot_engine import SqlglotEngine
  from databricks.labs.lakebridge.transpiler.transpile_engine import TranspileEngine

@@ -54,7 +52,7 @@ def raise_validation_exception(msg: str) -> NoReturn:
  raise ValueError(msg)


- def _installer(ws: WorkspaceClient) -> WorkspaceInstaller:
+ def _installer(ws: WorkspaceClient, transpiler_repository: TranspilerRepository) -> WorkspaceInstaller:
  app_context = ApplicationContext(_verify_workspace_client(ws))
  return WorkspaceInstaller(
  app_context.workspace_client,
@@ -64,6 +62,7 @@ def _installer(ws: WorkspaceClient) -> WorkspaceInstaller:
  app_context.product_info,
  app_context.resource_configurator,
  app_context.workspace_installation,
+ transpiler_repository=transpiler_repository,
  )


@@ -114,12 +113,13 @@ def transpile(
  skip_validation: str | None = None,
  catalog_name: str | None = None,
  schema_name: str | None = None,
+ transpiler_repository: TranspilerRepository = TranspilerRepository.user_home(),
  ):
  """Transpiles source dialect to databricks dialect"""
  ctx = ApplicationContext(w)
  logger.debug(f"Preconfigured transpiler config: {ctx.transpile_config!r}")
  with_user_agent_extra("cmd", "execute-transpile")
- checker = _TranspileConfigChecker(ctx.transpile_config, ctx.prompts)
+ checker = _TranspileConfigChecker(ctx.transpile_config, ctx.prompts, transpiler_repository)
  checker.use_transpiler_config_path(transpiler_config_path)
  checker.use_source_dialect(source_dialect)
  checker.use_input_source(input_source)
@@ -188,14 +188,19 @@ class _TranspileConfigChecker:

  _config: TranspileConfig
  """The workspace configuration for transpiling, updated from command-line arguments."""
- # _engine: TranspileEngine | None
- # """The transpiler engine to use for transpiling, lazily loaded based on the configuration."""
  _prompts: Prompts
  """Prompting system, for requesting configuration that hasn't been provided."""
  _source_dialect_override: str | None = None
  """The source dialect provided on the command-line, if any."""
-
- def __init__(self, config: TranspileConfig | None, prompts: Prompts) -> None:
+ _transpiler_repository: TranspilerRepository
+ """The repository where available transpilers are installed."""
+
+ def __init__(
+ self,
+ config: TranspileConfig | None,
+ prompts: Prompts,
+ transpiler_repository: TranspilerRepository,
+ ) -> None:
  if config is None:
  logger.warning(
  "No workspace transpile configuration, use 'install-transpile' to (re)install and configure; using defaults for now."
@@ -203,6 +208,7 @@ class _TranspileConfigChecker:
  config = TranspileConfig()
  self._config = config
  self._prompts = prompts
+ self._transpiler_repository = transpiler_repository
  self._source_dialect_override = None

  @staticmethod
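
The constructor change above replaces the static `TranspilerInstaller` lookups with an injected `TranspilerRepository`, so the checker can be pointed at a repository other than `~/.databricks/labs`. A minimal test sketch under that assumption (`_TranspileConfigChecker` is internal API; `MockPrompts` comes from `databricks-labs-blueprint`, and the `tmp_path` fixture assumes pytest):

```python
from pathlib import Path

from databricks.labs.blueprint.tui import MockPrompts

from databricks.labs.lakebridge.cli import _TranspileConfigChecker
from databricks.labs.lakebridge.config import TranspileConfig
from databricks.labs.lakebridge.transpiler.repository import TranspilerRepository


def test_checker_with_isolated_repository(tmp_path: Path) -> None:
    # Point the checker at an empty, throwaway labs directory instead of ~/.databricks/labs.
    repository = TranspilerRepository(tmp_path / "labs")
    checker = _TranspileConfigChecker(TranspileConfig(), MockPrompts({}), repository)
    # Nothing is installed under tmp_path, so the repository reports no dialects.
    assert repository.all_dialects() == set()
```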
@@ -334,7 +340,7 @@ class _TranspileConfigChecker:
  def _configure_transpiler_config_path(self, source_dialect: str) -> TranspileEngine | None:
  """Configure the transpiler config path based on the requested source dialect."""
  # Names of compatible transpiler engines for the given dialect.
- compatible_transpilers = TranspilerInstaller.transpilers_with_dialect(source_dialect)
+ compatible_transpilers = self._transpiler_repository.transpilers_with_dialect(source_dialect)
  match len(compatible_transpilers):
  case 0:
  # Nothing found for the specified dialect, fail.
@@ -349,7 +355,7 @@ class _TranspileConfigChecker:
  f"Multiple transpilers available for dialect {source_dialect!r}: {compatible_transpilers!r}"
  )
  transpiler_name = self._prompts.choice("Select the transpiler:", list(compatible_transpilers))
- transpiler_config_path = TranspilerInstaller.transpiler_config_path(transpiler_name)
+ transpiler_config_path = self._transpiler_repository.transpiler_config_path(transpiler_name)
  logger.info(f"Lakebridge will use the {transpiler_name} transpiler.")
  self._config = dataclasses.replace(self._config, transpiler_config_path=str(transpiler_config_path))
  return TranspileEngine.load_engine(transpiler_config_path)
@@ -361,7 +367,7 @@ class _TranspileConfigChecker:
  if engine is None:
  engine = self._configure_transpiler_config_path(source_dialect)
  if engine is None:
- supported_dialects = ", ".join(TranspilerInstaller.all_dialects())
+ supported_dialects = ", ".join(self._transpiler_repository.all_dialects())
  msg = f"{msg_prefix}: {source_dialect!r} (supported dialects: {supported_dialects})"
  raise_validation_exception(msg)
  else:
@@ -375,7 +381,7 @@ class _TranspileConfigChecker:

  def _prompt_source_dialect(self) -> TranspileEngine:
  # This is similar to the post-install prompting for the source dialect.
- supported_dialects = TranspilerInstaller.all_dialects()
+ supported_dialects = self._transpiler_repository.all_dialects()
  match len(supported_dialects):
  case 0:
  msg = "No transpilers are available, install using 'install-transpile' or use --transpiler-conf-path'."
@@ -522,7 +528,7 @@ async def _transpile(ctx: ApplicationContext, config: TranspileConfig, engine: T
  return [status]


- def _override_workspace_client_config(ctx: ApplicationContext, overrides: dict[str, str] | None):
+ def _override_workspace_client_config(ctx: ApplicationContext, overrides: dict[str, str] | None) -> None:
  """
  Override the Workspace client's SDK config with the user provided SDK config.
  Users can provide the cluster_id and warehouse_id during the installation.
@@ -541,7 +547,7 @@ def _override_workspace_client_config(ctx: ApplicationContext, overrides: dict[s


  @lakebridge.command
- def reconcile(w: WorkspaceClient):
+ def reconcile(w: WorkspaceClient) -> None:
  """[EXPERIMENTAL] Reconciles source to Databricks datasets"""
  with_user_agent_extra("cmd", "execute-reconcile")
  ctx = ApplicationContext(w)
@@ -557,7 +563,7 @@ def reconcile(w: WorkspaceClient):


  @lakebridge.command
- def aggregates_reconcile(w: WorkspaceClient):
+ def aggregates_reconcile(w: WorkspaceClient) -> None:
  """[EXPERIMENTAL] Reconciles Aggregated source to Databricks datasets"""
  with_user_agent_extra("cmd", "execute-aggregates-reconcile")
  ctx = ApplicationContext(w)
@@ -574,7 +580,13 @@ def aggregates_reconcile(w: WorkspaceClient):


  @lakebridge.command
- def generate_lineage(w: WorkspaceClient, *, source_dialect: str | None = None, input_source: str, output_folder: str):
+ def generate_lineage(
+ w: WorkspaceClient,
+ *,
+ source_dialect: str | None = None,
+ input_source: str,
+ output_folder: str,
+ ) -> None:
  """[Experimental] Generates a lineage of source SQL files or folder"""
  ctx = ApplicationContext(w)
  logger.debug(f"User: {ctx.current_user}")
@@ -595,7 +607,7 @@ def generate_lineage(w: WorkspaceClient, *, source_dialect: str | None = None, i


  @lakebridge.command
- def configure_secrets(w: WorkspaceClient):
+ def configure_secrets(w: WorkspaceClient) -> None:
  """Setup reconciliation connection profile details as Secrets on Databricks Workspace"""
  recon_conf = ReconConfigPrompts(w)

@@ -607,7 +619,7 @@ def configure_secrets(w: WorkspaceClient):


  @lakebridge.command(is_unauthenticated=True)
- def configure_database_profiler():
+ def configure_database_profiler() -> None:
  """[Experimental] Install the lakebridge Assessment package"""
  prompts = Prompts()

@@ -622,32 +634,40 @@ def configure_database_profiler():


  @lakebridge.command()
- def install_transpile(w: WorkspaceClient, artifact: str | None = None):
+ def install_transpile(
+ w: WorkspaceClient,
+ artifact: str | None = None,
+ transpiler_repository: TranspilerRepository = TranspilerRepository.user_home(),
+ ) -> None:
  """Install the Lakebridge transpilers"""
  with_user_agent_extra("cmd", "install-transpile")
  if artifact:
  with_user_agent_extra("artifact-overload", Path(artifact).name)
  user = w.current_user
  logger.debug(f"User: {user}")
- installer = _installer(w)
+ installer = _installer(w, transpiler_repository)
  installer.run(module="transpile", artifact=artifact)


  @lakebridge.command(is_unauthenticated=False)
- def configure_reconcile(w: WorkspaceClient):
+ def configure_reconcile(
+ w: WorkspaceClient,
+ transpiler_repository: TranspilerRepository = TranspilerRepository.user_home(),
+ ) -> None:
  """Configure the Lakebridge reconciliation module"""
  with_user_agent_extra("cmd", "configure-reconcile")
  user = w.current_user
  logger.debug(f"User: {user}")
- dbsql_id = _create_warehouse(w)
- w.config.warehouse_id = dbsql_id
- installer = _installer(w)
+ if not w.config.warehouse_id:
+ dbsql_id = _create_warehouse(w)
+ w.config.warehouse_id = dbsql_id
+ logger.debug(f"Warehouse ID used for configuring reconcile: {w.config.warehouse_id}.")
+ installer = _installer(w, transpiler_repository)
  installer.run(module="reconcile")
- _remove_warehouse(w, dbsql_id)


  @lakebridge.command()
- def analyze(w: WorkspaceClient, source_directory: str, report_file: str, source_tech: str | None = None):
+ def analyze(w: WorkspaceClient, source_directory: str, report_file: str, source_tech: str | None = None) -> None:
  """Run the Analyzer"""
  with_user_agent_extra("cmd", "analyze")
  ctx = ApplicationContext(w)
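
With the `configure_reconcile` change above, a warehouse ID already present on the SDK config is now reused, and a new warehouse is created only when none is configured (the previous version always created one and removed it afterwards). A minimal sketch of pre-seeding the warehouse, with a hypothetical warehouse ID:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.config.warehouse_id = "1234567890abcdef"  # hypothetical ID of an existing SQL warehouse
# configure_reconcile(w) would now skip _create_warehouse(w) and keep this warehouse
# in place after the run, instead of provisioning and deleting a temporary one.
```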
--- a/databricks/labs/lakebridge/config.py
+++ b/databricks/labs/lakebridge/config.py
@@ -185,6 +185,6 @@ class ReconcileConfig:


  @dataclass
- class RemorphConfigs:
+ class LakebridgeConfiguration:
  transpile: TranspileConfig | None = None
  reconcile: ReconcileConfig | None = None
--- a/databricks/labs/lakebridge/contexts/application.py
+++ b/databricks/labs/lakebridge/contexts/application.py
@@ -12,7 +12,7 @@ from databricks.sdk.config import Config
  from databricks.sdk.errors import NotFound
  from databricks.sdk.service.iam import User

- from databricks.labs.lakebridge.config import TranspileConfig, ReconcileConfig, RemorphConfigs
+ from databricks.labs.lakebridge.config import TranspileConfig, ReconcileConfig, LakebridgeConfiguration
  from databricks.labs.lakebridge.deployment.configurator import ResourceConfigurator
  from databricks.labs.lakebridge.deployment.dashboard import DashboardDeployment
  from databricks.labs.lakebridge.deployment.installation import WorkspaceInstallation
@@ -42,7 +42,7 @@ class ApplicationContext:

  @cached_property
  def product_info(self) -> ProductInfo:
- return ProductInfo.from_class(RemorphConfigs)
+ return ProductInfo.from_class(LakebridgeConfiguration)

  @cached_property
  def installation(self) -> Installation:
@@ -65,8 +65,8 @@ class ApplicationContext:
  return None

  @cached_property
- def remorph_config(self) -> RemorphConfigs:
- return RemorphConfigs(transpile=self.transpile_config, reconcile=self.recon_config)
+ def remorph_config(self) -> LakebridgeConfiguration:
+ return LakebridgeConfiguration(transpile=self.transpile_config, reconcile=self.recon_config)

  @cached_property
  def connect_config(self) -> Config:
--- a/databricks/labs/lakebridge/deployment/dashboard.py
+++ b/databricks/labs/lakebridge/deployment/dashboard.py
@@ -54,7 +54,8 @@ class DashboardDeployment:

  valid_dashboard_refs = set()
  for dashboard_folder in folder.iterdir():
- if not dashboard_folder.is_dir():
+ # Make sure the directory contains a dashboard
+ if not (dashboard_folder.is_dir() and dashboard_folder.joinpath("dashboard.yml").exists()):
  continue
  valid_dashboard_refs.add(self._dashboard_reference(dashboard_folder))
  dashboard = self._update_or_create_dashboard(dashboard_folder, parent_path, config.metadata_config)
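
The stricter check above skips directories that do not contain a `dashboard.yml`, instead of treating every subdirectory as a dashboard. Roughly, the validity test now amounts to the following (a standalone sketch, not the class itself):

```python
from pathlib import Path


def looks_like_dashboard_folder(folder: Path) -> bool:
    # A dashboard folder is a directory that carries a dashboard.yml manifest.
    return folder.is_dir() and (folder / "dashboard.yml").exists()
```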
--- a/databricks/labs/lakebridge/deployment/installation.py
+++ b/databricks/labs/lakebridge/deployment/installation.py
@@ -11,7 +11,7 @@ from databricks.sdk.errors import NotFound
  from databricks.sdk.mixins.compute import SemVer
  from databricks.sdk.errors.platform import InvalidParameterValue, ResourceDoesNotExist

- from databricks.labs.lakebridge.config import RemorphConfigs
+ from databricks.labs.lakebridge.config import LakebridgeConfiguration
  from databricks.labs.lakebridge.deployment.recon import ReconDeployment

  logger = logging.getLogger("databricks.labs.lakebridge.install")
@@ -54,14 +54,14 @@ class WorkspaceInstallation:
  return Version(
  version=local_installed_version,
  date=local_installed_date,
- wheel=f"databricks_labs_remorph-{local_installed_version}-py3-none-any.whl",
+ wheel=f"databricks_labs_lakebridge-{local_installed_version}-py3-none-any.whl",
  )

  def _get_ws_version(self):
  try:
  return self._installation.load(Version)
- except ResourceDoesNotExist as err:
- logger.warning(f"Unable to get Workspace Version due to: {err}")
+ except ResourceDoesNotExist:
+ logger.debug("No existing version found in workspace; assuming fresh installation.")
  return None

  def _apply_upgrades(self):
@@ -91,21 +91,21 @@ class WorkspaceInstallation:
  wheel_paths = [f"/Workspace{wheel}" for wheel in wheel_paths]
  return wheel_paths

- def install(self, config: RemorphConfigs):
+ def install(self, config: LakebridgeConfiguration):
  self._apply_upgrades()
  wheel_paths: list[str] = self._upload_wheel()
  if config.reconcile:
- logger.info("Installing Remorph reconcile Metadata components.")
+ logger.info("Installing Lakebridge reconcile Metadata components.")
  self._recon_deployment.install(config.reconcile, wheel_paths)

- def uninstall(self, config: RemorphConfigs):
- # This will remove all the Remorph modules
+ def uninstall(self, config: LakebridgeConfiguration):
+ # This will remove all the Lakebridge modules
  if not self._prompts.confirm(
- "Do you want to uninstall Remorph from the workspace too, this would "
- "remove Remorph project folder, jobs, metadata and dashboards"
+ "Do you want to uninstall Lakebridge from the workspace too, this would "
+ "remove Lakebridge project folder, jobs, metadata and dashboards"
  ):
  return
- logger.info(f"Uninstalling Remorph from {self._ws.config.host}.")
+ logger.info(f"Uninstalling Lakebridge from {self._ws.config.host}.")
  try:
  self._installation.files()
  except NotFound:
--- a/databricks/labs/lakebridge/install.py
+++ b/databricks/labs/lakebridge/install.py
@@ -2,7 +2,6 @@ import re
  import abc
  import dataclasses
  import shutil
- from collections.abc import Iterable
  from json import loads, dump
  import logging
  import os
@@ -30,16 +29,14 @@ from databricks.labs.lakebridge.config import (
  TranspileConfig,
  ReconcileConfig,
  DatabaseConfig,
- RemorphConfigs,
+ LakebridgeConfiguration,
  ReconcileMetadataConfig,
- LSPConfigOptionV1,
  )
-
  from databricks.labs.lakebridge.deployment.configurator import ResourceConfigurator
  from databricks.labs.lakebridge.deployment.installation import WorkspaceInstallation
  from databricks.labs.lakebridge.helpers.file_utils import chdir
  from databricks.labs.lakebridge.reconcile.constants import ReconReportType, ReconSourceType
- from databricks.labs.lakebridge.transpiler.lsp.lsp_engine import LSPConfig
+ from databricks.labs.lakebridge.transpiler.repository import TranspilerRepository

  logger = logging.getLogger(__name__)

@@ -47,39 +44,8 @@ TRANSPILER_WAREHOUSE_PREFIX = "Lakebridge Transpiler Validation"


  class TranspilerInstaller(abc.ABC):
-
- @classmethod
- def labs_path(cls) -> Path:
- return Path.home() / ".databricks" / "labs"
-
- @classmethod
- def transpilers_path(cls) -> Path:
- return cls.labs_path() / "remorph-transpilers"
-
- @classmethod
- def install_from_pypi(cls, product_name: str, pypi_name: str, artifact: Path | None = None) -> Path | None:
- installer = WheelInstaller(product_name, pypi_name, artifact)
- return installer.install()
-
- @classmethod
- def install_from_maven(
- cls, product_name: str, group_id: str, artifact_id: str, artifact: Path | None = None
- ) -> Path | None:
- installer = MavenInstaller(product_name, group_id, artifact_id, artifact)
- return installer.install()
-
- @classmethod
- def get_installed_version(cls, product_name: str, is_transpiler=True) -> str | None:
- product_path = (cls.transpilers_path() if is_transpiler else cls.labs_path()) / product_name
- current_version_path = product_path / "state" / "version.json"
- if not current_version_path.exists():
- return None
- text = current_version_path.read_text("utf-8")
- data: dict[str, Any] = loads(text)
- version: str | None = data.get("version", None)
- if not version or not version.startswith("v"):
- return None
- return version[1:]
+ def __init__(self, repository: TranspilerRepository) -> None:
+ self._repository = repository

  _version_pattern = re.compile(r"[_-](\d+(?:[.\-_]\w*\d+)+)")

@@ -99,65 +65,6 @@ class TranspilerInstaller(abc.ABC):
  group = group[:-4]
  return group

- @classmethod
- def all_transpiler_configs(cls) -> dict[str, LSPConfig]:
- all_configs = cls._all_transpiler_configs()
- return {config.name: config for config in all_configs}
-
- @classmethod
- def all_transpiler_names(cls) -> set[str]:
- all_configs = cls.all_transpiler_configs()
- return set(all_configs.keys())
-
- @classmethod
- def all_dialects(cls) -> set[str]:
- all_dialects: set[str] = set()
- for config in cls._all_transpiler_configs():
- all_dialects = all_dialects.union(config.remorph.dialects)
- return all_dialects
-
- @classmethod
- def transpilers_with_dialect(cls, dialect: str) -> set[str]:
- configs = filter(lambda cfg: dialect in cfg.remorph.dialects, cls.all_transpiler_configs().values())
- return set(config.name for config in configs)
-
- @classmethod
- def transpiler_config_path(cls, transpiler_name) -> Path:
- config = cls.all_transpiler_configs().get(transpiler_name, None)
- if not config:
- raise ValueError(f"No such transpiler: {transpiler_name}")
- return config.path
-
- @classmethod
- def transpiler_config_options(cls, transpiler_name, source_dialect) -> list[LSPConfigOptionV1]:
- config = cls.all_transpiler_configs().get(transpiler_name, None)
- if not config:
- return [] # gracefully returns an empty list, since this can only happen during testing
- return config.options_for_dialect(source_dialect)
-
- @classmethod
- def _all_transpiler_configs(cls) -> Iterable[LSPConfig]:
- path = cls.transpilers_path()
- if path.exists():
- all_files = os.listdir(path)
- for file in all_files:
- config = cls._transpiler_config(cls.transpilers_path() / file)
- if config:
- yield config
-
- @classmethod
- def _transpiler_config(cls, path: Path) -> LSPConfig | None:
- if not path.is_dir() or not (path / "lib").is_dir():
- return None
- config_path = path / "lib" / "config.yml"
- if not config_path.is_file():
- return None
- try:
- return LSPConfig.load(config_path)
- except ValueError as e:
- logger.error(f"Could not load config: {path!s}", exc_info=e)
- return None
-
  @classmethod
  def _store_product_state(cls, product_path: Path, version: str) -> None:
  state_path = product_path / "state"
@@ -182,7 +89,14 @@ class WheelInstaller(TranspilerInstaller):
  logger.error(f"Error while fetching PyPI metadata: {product_name}", exc_info=e)
  return None

- def __init__(self, product_name: str, pypi_name: str, artifact: Path | None = None):
+ def __init__(
+ self,
+ repository: TranspilerRepository,
+ product_name: str,
+ pypi_name: str,
+ artifact: Path | None = None,
+ ) -> None:
+ super().__init__(repository)
  self._product_name = product_name
  self._pypi_name = pypi_name
  self._artifact = artifact
@@ -200,7 +114,7 @@ class WheelInstaller(TranspilerInstaller):
  logger.warning(f"Could not determine the latest version of {self._pypi_name}")
  logger.error(f"Failed to install transpiler: {self._product_name}")
  return None
- installed_version = self.get_installed_version(self._product_name)
+ installed_version = self._repository.get_installed_version(self._product_name)
  if installed_version == latest_version:
  logger.info(f"{self._pypi_name} v{latest_version} already installed")
  return None
@@ -208,14 +122,12 @@ class WheelInstaller(TranspilerInstaller):

  def _install_latest_version(self, version: str) -> Path | None:
  logger.info(f"Installing Databricks {self._product_name} transpiler v{version}")
- # use type(self) to workaround a mock bug on class methods
- self._product_path = type(self).transpilers_path() / self._product_name
+ self._product_path = self._repository.transpilers_path() / self._product_name
  backup_path = Path(f"{self._product_path!s}-saved")
  if self._product_path.exists():
  os.rename(self._product_path, backup_path)
- self._product_path.mkdir(parents=True, exist_ok=True)
  self._install_path = self._product_path / "lib"
- self._install_path.mkdir(exist_ok=True)
+ self._install_path.mkdir(parents=True, exist_ok=True)
  try:
  result = self._unsafe_install_latest_version(version)
  logger.info(f"Successfully installed {self._pypi_name} v{version}")
@@ -416,7 +328,15 @@ class MavenInstaller(TranspilerInstaller):
  logger.info(f"Successfully installed: {group_id}:{artifact_id}:{version}")
  return True

- def __init__(self, product_name: str, group_id: str, artifact_id: str, artifact: Path | None = None):
+ def __init__(
+ self,
+ repository: TranspilerRepository,
+ product_name: str,
+ group_id: str,
+ artifact_id: str,
+ artifact: Path | None = None,
+ ) -> None:
+ super().__init__(repository)
  self._product_name = product_name
  self._group_id = group_id
  self._artifact_id = artifact_id
@@ -434,7 +354,7 @@ class MavenInstaller(TranspilerInstaller):
  logger.warning(f"Could not determine the latest version of Databricks {self._product_name} transpiler")
  logger.error("Failed to install transpiler: Databricks {self._product_name} transpiler")
  return None
- installed_version = self.get_installed_version(self._product_name)
+ installed_version = self._repository.get_installed_version(self._product_name)
  if installed_version == latest_version:
  logger.info(f"Databricks {self._product_name} transpiler v{latest_version} already installed")
  return None
@@ -442,8 +362,7 @@ class MavenInstaller(TranspilerInstaller):

  def _install_version(self, version: str) -> Path | None:
  logger.info(f"Installing Databricks {self._product_name} transpiler v{version}")
- # use type(self) to workaround a mock bug on class methods
- self._product_path = type(self).transpilers_path() / self._product_name
+ self._product_path = self._repository.transpilers_path() / self._product_name
  backup_path = Path(f"{self._product_path!s}-saved")
  if backup_path.exists():
  rmtree(backup_path)
@@ -495,6 +414,7 @@ class WorkspaceInstaller:
  resource_configurator: ResourceConfigurator,
  workspace_installation: WorkspaceInstallation,
  environ: dict[str, str] | None = None,
+ transpiler_repository: TranspilerRepository = TranspilerRepository.user_home(),
  ):
  self._ws = ws
  self._prompts = prompts
@@ -503,6 +423,7 @@ class WorkspaceInstaller:
  self._product_info = product_info
  self._resource_configurator = resource_configurator
  self._ws_installation = workspace_installation
+ self._transpiler_repository = transpiler_repository

  if not environ:
  environ = dict(os.environ.items())
@@ -511,7 +432,9 @@ class WorkspaceInstaller:
  msg = "WorkspaceInstaller is not supposed to be executed in Databricks Runtime"
  raise SystemExit(msg)

- def run(self, module: str, config: RemorphConfigs | None = None, artifact: str | None = None) -> RemorphConfigs:
+ def run(
+ self, module: str, config: LakebridgeConfiguration | None = None, artifact: str | None = None
+ ) -> LakebridgeConfiguration:
  logger.debug(f"Initializing workspace installation for module: {module} (config: {config})")
  if module == "transpile" and artifact:
  self.install_artifact(artifact)
@@ -526,15 +449,14 @@ class WorkspaceInstaller:
  logger.info("Installation completed successfully! Please refer to the documentation for the next steps.")
  return config

- @classmethod
- def install_bladebridge(cls, artifact: Path | None = None):
+ def install_bladebridge(self, artifact: Path | None = None) -> None:
  local_name = "bladebridge"
  pypi_name = "databricks-bb-plugin"
- TranspilerInstaller.install_from_pypi(local_name, pypi_name, artifact)
+ wheel_installer = WheelInstaller(self._transpiler_repository, local_name, pypi_name, artifact)
+ wheel_installer.install()

- @classmethod
- def install_morpheus(cls, artifact: Path | None = None):
- if not cls.is_java_version_okay():
+ def install_morpheus(self, artifact: Path | None = None) -> None:
+ if not self.is_java_version_okay():
  logger.error(
  "The morpheus transpiler requires Java 11 or above. Please install Java and re-run 'install-transpile'."
  )
@@ -542,7 +464,8 @@ class WorkspaceInstaller:
  product_name = "databricks-morph-plugin"
  group_id = "com.databricks.labs"
  artifact_id = product_name
- TranspilerInstaller.install_from_maven(product_name, group_id, artifact_id, artifact)
+ maven_installer = MavenInstaller(self._transpiler_repository, product_name, group_id, artifact_id, artifact)
+ maven_installer.install()


  @classmethod
  def is_java_version_okay(cls) -> bool:
@@ -564,16 +487,15 @@ class WorkspaceInstaller:
  case _:
  return True

- @classmethod
- def install_artifact(cls, artifact: str):
+ def install_artifact(self, artifact: str):
  path = Path(artifact)
  if not path.exists():
  logger.error(f"Could not locate artifact {artifact}")
  return
  if "databricks-morph-plugin" in path.name:
- cls.install_morpheus(path)
+ self.install_morpheus(path)
  elif "databricks_bb_plugin" in path.name:
- cls.install_bladebridge(path)
+ self.install_bladebridge(path)
  else:
  logger.fatal(f"Cannot install unsupported artifact: {artifact}")

@@ -637,17 +559,17 @@ class WorkspaceInstaller:
  patch = int(match["patch"] or 0)
  return feature, interim, update, patch

- def configure(self, module: str) -> RemorphConfigs:
+ def configure(self, module: str) -> LakebridgeConfiguration:
  match module:
  case "transpile":
  logger.info("Configuring lakebridge `transpile`.")
- return RemorphConfigs(self._configure_transpile(), None)
+ return LakebridgeConfiguration(self._configure_transpile(), None)
  case "reconcile":
  logger.info("Configuring lakebridge `reconcile`.")
- return RemorphConfigs(None, self._configure_reconcile())
+ return LakebridgeConfiguration(None, self._configure_reconcile())
  case "all":
  logger.info("Configuring lakebridge `transpile` and `reconcile`.")
- return RemorphConfigs(
+ return LakebridgeConfiguration(
  self._configure_transpile(),
  self._configure_reconcile(),
  )
@@ -697,19 +619,19 @@ class WorkspaceInstaller:
  return config

  def _all_installed_dialects(self) -> list[str]:
- return sorted(TranspilerInstaller.all_dialects())
+ return sorted(self._transpiler_repository.all_dialects())

  def _transpilers_with_dialect(self, dialect: str) -> list[str]:
- return sorted(TranspilerInstaller.transpilers_with_dialect(dialect))
+ return sorted(self._transpiler_repository.transpilers_with_dialect(dialect))

  def _transpiler_config_path(self, transpiler: str) -> Path:
- return TranspilerInstaller.transpiler_config_path(transpiler)
+ return self._transpiler_repository.transpiler_config_path(transpiler)

  def _prompt_for_new_transpile_installation(self) -> TranspileConfig:
  install_later = "Set it later"
  # TODO tidy this up, logger might not display the below in console...
  logger.info("Please answer a few questions to configure lakebridge `transpile`")
- all_dialects = [install_later] + self._all_installed_dialects()
+ all_dialects = [install_later, *self._all_installed_dialects()]
  source_dialect: str | None = self._prompts.choice("Select the source dialect:", all_dialects, sort=False)
  if source_dialect == install_later:
  source_dialect = None
@@ -760,14 +682,12 @@ class WorkspaceInstaller:
  )

  def _prompt_for_transpiler_options(self, transpiler_name: str, source_dialect: str) -> dict[str, Any] | None:
- config_options = TranspilerInstaller.transpiler_config_options(transpiler_name, source_dialect)
+ config_options = self._transpiler_repository.transpiler_config_options(transpiler_name, source_dialect)
  if len(config_options) == 0:
  return None
  return {option.flag: option.prompt_for_value(self._prompts) for option in config_options}

- def _configure_catalog(
- self,
- ) -> str:
+ def _configure_catalog(self) -> str:
  return self._resource_configurator.prompt_for_catalog_setup()

  def _configure_schema(
--- a/databricks/labs/lakebridge/transpiler/execute.py
+++ b/databricks/labs/lakebridge/transpiler/execute.py
@@ -49,8 +49,9 @@ class TranspilingContext:


  async def _process_one_file(context: TranspilingContext) -> tuple[int, list[TranspileError]]:
+ input_path = context.input_path

- logger.debug(f"Started processing file: {context.input_path!s}")
+ logger.debug(f"Started processing file: {input_path}")

  if not context.config.source_dialect:
  error = TranspileError(
@@ -62,7 +63,11 @@ async def _process_one_file(context: TranspilingContext) -> tuple[int, list[Tran
  )
  return 0, [error]

- source_code = read_text(context.input_path)
+ # Check if it looks like XML, where we need to sniff the encoding instead of relying on a BOM or defaulting to the
+ # local platform encoding.
+ sniff_xml_encoding = input_path.suffix.lower() == ".xml"
+
+ source_code = read_text(input_path, detect_xml=sniff_xml_encoding)
  context = dataclasses.replace(context, source_code=source_code)

  transpile_result = await _transpile(
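
The new code above routes `.xml` inputs through encoding detection because XML may declare its encoding in the prolog, which a plain platform-default read would ignore. A conceptual sketch of such sniffing (not Lakebridge's actual `read_text` helper):

```python
import re


def sniff_xml_encoding(raw: bytes) -> str:
    # Look for <?xml version="1.0" encoding="..."?> in the prolog; fall back to UTF-8.
    match = re.match(rb'\s*<\?xml[^>]*encoding=["\']([A-Za-z0-9._-]+)["\']', raw)
    return match.group(1).decode("ascii") if match else "utf-8"


print(sniff_xml_encoding(b'<?xml version="1.0" encoding="windows-1252"?><root/>'))  # windows-1252
```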
--- a/databricks/labs/lakebridge/transpiler/lsp/lsp_engine.py
+++ b/databricks/labs/lakebridge/transpiler/lsp/lsp_engine.py
@@ -532,18 +532,18 @@ class LSPEngine(TranspileEngine):

  def open_document(self, file_path: Path, source_code: str) -> None:
  text_document = TextDocumentItem(
- uri=file_path.as_uri(), language_id=LanguageKind.Sql, version=1, text=source_code
+ uri=file_path.absolute().as_uri(), language_id=LanguageKind.Sql, version=1, text=source_code
  )
  params = DidOpenTextDocumentParams(text_document)
  self._client.text_document_did_open(params)

  def close_document(self, file_path: Path) -> None:
- text_document = TextDocumentIdentifier(uri=file_path.as_uri())
+ text_document = TextDocumentIdentifier(uri=file_path.absolute().as_uri())
  params = DidCloseTextDocumentParams(text_document)
  self._client.text_document_did_close(params)

  async def transpile_document(self, file_path: Path) -> TranspileDocumentResult:
- params = TranspileDocumentParams(uri=file_path.as_uri(), language_id=LanguageKind.Sql)
+ params = TranspileDocumentParams(uri=file_path.absolute().as_uri(), language_id=LanguageKind.Sql)
  result = await self._client.transpile_document_async(params)
  return result

--- /dev/null
+++ b/databricks/labs/lakebridge/transpiler/repository.py
@@ -0,0 +1,123 @@
+ from __future__ import annotations
+
+ from collections.abc import Iterable
+ from json import loads
+ import logging
+ import os
+ from typing import Any
+ from pathlib import Path
+
+ from databricks.labs.lakebridge.config import LSPConfigOptionV1
+
+ from databricks.labs.lakebridge.transpiler.lsp.lsp_engine import LSPConfig
+
+ logger = logging.getLogger(__name__)
+
+
+ class TranspilerRepository:
+ """
+ Repository for managing the installed transpilers in the user's home directory.
+
+ The default repository for a user is always located under ~/.databricks/labs, and can be obtained
+ via the `TranspilerRepository.user_home()` method.
+ """
+
+ @staticmethod
+ def default_labs_path() -> Path:
+ """Return the default path where labs applications are installed."""
+ return Path.home() / ".databricks" / "labs"
+
+ _default_repository: TranspilerRepository | None = None
+
+ @classmethod
+ def user_home(cls) -> TranspilerRepository:
+ """The default repository for transpilers in the current user's home directory."""
+ repository = cls._default_repository
+ if repository is None:
+ cls._default_repository = repository = cls(cls.default_labs_path())
+ return repository
+
+ def __init__(self, labs_path: Path) -> None:
+ """Initialize the repository, based in the given location.
+
+ This should only be used directly by tests; for the default repository, use `TranspilerRepository.user_home()`.
+
+ Args:
+ labs_path: The path where the labs applications are installed.
+ """
+ if self._default_repository == self and labs_path == self.default_labs_path():
+ raise ValueError("Use TranspilerRepository.user_home() to get the default repository.")
+ self._labs_path = labs_path
+
+ def __repr__(self) -> str:
+ return f"TranspilerRepository(labs_path={self._labs_path!r})"
+
+ def transpilers_path(self) -> Path:
+ return self._labs_path / "remorph-transpilers"
+
+ def get_installed_version(self, product_name: str) -> str | None:
+ # Warning: product_name here (eg. 'morpheus') and transpiler_name elsewhere (eg. Morpheus) are not the same!
+ product_path = self.transpilers_path() / product_name
+ current_version_path = product_path / "state" / "version.json"
+ if not current_version_path.exists():
+ return None
+ text = current_version_path.read_text("utf-8")
+ data: dict[str, Any] = loads(text)
+ version: str | None = data.get("version", None)
+ if not version or not version.startswith("v"):
+ return None
+ return version[1:]
+
+ def all_transpiler_configs(self) -> dict[str, LSPConfig]:
+ all_configs = self._all_transpiler_configs()
+ return {config.name: config for config in all_configs}
+
+ def all_transpiler_names(self) -> set[str]:
+ all_configs = self.all_transpiler_configs()
+ return set(all_configs.keys())
+
+ def all_dialects(self) -> set[str]:
+ all_dialects: set[str] = set()
+ for config in self._all_transpiler_configs():
+ all_dialects = all_dialects.union(config.remorph.dialects)
+ return all_dialects
+
+ def transpilers_with_dialect(self, dialect: str) -> set[str]:
+ configs = filter(lambda cfg: dialect in cfg.remorph.dialects, self.all_transpiler_configs().values())
+ return set(config.name for config in configs)
+
+ def transpiler_config_path(self, transpiler_name: str) -> Path:
+ # Note: Can't just go straight to the directory: the transpiler names don't exactly match the directory names.
+ try:
+ config = next(c for c in self._all_transpiler_configs() if c.name == transpiler_name)
+ except StopIteration as e:
+ raise ValueError(f"No such transpiler: {transpiler_name}") from e
+ return config.path
+
+ def transpiler_config_options(self, transpiler_name: str, source_dialect: str) -> list[LSPConfigOptionV1]:
+ config = self.all_transpiler_configs().get(transpiler_name, None)
+ if not config:
+ return [] # gracefully returns an empty list, since this can only happen during testing
+ return config.options_for_dialect(source_dialect)
+
+ def _all_transpiler_configs(self) -> Iterable[LSPConfig]:
+ transpilers_path = self.transpilers_path()
+ if transpilers_path.exists():
+ all_files = os.listdir(transpilers_path)
+ for file in all_files:
+ config = self._transpiler_config(transpilers_path / file)
+ if config:
+ yield config
+
+ @classmethod
+ def _transpiler_config(cls, path: Path) -> LSPConfig | None:
+ if not path.is_dir() or not (path / "lib").is_dir():
+ return None
+ config_path = path / "lib" / "config.yml"
+ if not config_path.is_file():
+ return None
+ try:
+ return LSPConfig.load(config_path)
+ except ValueError as e:
+ logger.error(f"Could not load config: {path!s}", exc_info=e)
+ return None
--- a/databricks_labs_lakebridge-0.10.4.dist-info/METADATA
+++ b/databricks_labs_lakebridge-0.10.6.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: databricks-labs-lakebridge
- Version: 0.10.4
+ Version: 0.10.6
  Summary: Fast and predictable migrations to Databricks Lakehouse Platform. This tool is designed to help you migrate your data and workloads to the Databricks Lakehouse Platform in a fast, predictable, and reliable way. It provides a set of tools and utilities to help you reconcile your data and workloads, assess your current state, and plan your migration.
  Project-URL: Documentation, https://databrickslabs.github.io/lakebridge
  Project-URL: Issues, https://github.com/databrickslabs/lakebridge/issues
@@ -26,7 +26,7 @@ Classifier: Topic :: Utilities
  Requires-Python: >=3.10
  Requires-Dist: cryptography<45.1.0,>=44.0.2
  Requires-Dist: databricks-bb-analyzer~=0.1.9
- Requires-Dist: databricks-labs-blueprint[yaml]<0.12.0,>=0.11.2
+ Requires-Dist: databricks-labs-blueprint[yaml]<0.12.0,>=0.11.3
  Requires-Dist: databricks-labs-lsql==0.16.0
  Requires-Dist: databricks-sdk~=0.51.0
  Requires-Dist: duckdb~=1.2.2
--- a/databricks_labs_lakebridge-0.10.4.dist-info/RECORD
+++ b/databricks_labs_lakebridge-0.10.6.dist-info/RECORD
@@ -8,12 +8,12 @@ docs/lakebridge/src/theme/Footer/index.tsx,sha256=Jj8zY5WDiTLXwF_mAgld8Dh1A3MY1H
  docs/lakebridge/src/theme/Layout/index.tsx,sha256=IkdLr13jKmLxT0jWQqrwqrjVXc8Rwd_kWNpTd1t2sc0,592
  databricks/__init__.py,sha256=YqH8Hy8lHJxd0hLMZF6kWirUDdPiX90LRDX6S6yTMn0,261
  databricks/labs/__init__.py,sha256=YqH8Hy8lHJxd0hLMZF6kWirUDdPiX90LRDX6S6yTMn0,261
- databricks/labs/lakebridge/__about__.py,sha256=YbaA9ccBU-BwJ67El953ndAg5kViw0qZOkui5tbsV8c,49
+ databricks/labs/lakebridge/__about__.py,sha256=EZvYlS7Q_tJB25XAPiyvWgXD5fLEm9L04cHE341CM7E,49
  databricks/labs/lakebridge/__init__.py,sha256=nUNECqNvyfpT0aeWwlqG0ADT8U8ScCLb8WWpLydppcA,464
  databricks/labs/lakebridge/base_install.py,sha256=8NxXsNpgqXnuADKXVFh5oQL3osdvygRMY1amJwKfU08,490
- databricks/labs/lakebridge/cli.py,sha256=zeibf44Sfu6gjL5lLGnKTXle4kIVBy4hHDuITVL5TxY,31805
- databricks/labs/lakebridge/config.py,sha256=IjxvphM9fRQHQ2FAxwZ23deJGgSemJ3rMV0sp1Ob6e8,5833
- databricks/labs/lakebridge/install.py,sha256=x8YQwX-EwSKGYKHeotzUwADfzBrvFeplDHbjifLq9mA,39757
+ databricks/labs/lakebridge/cli.py,sha256=pZ5QXGPyIFmrNsgaUKOSIQy9cZjuCtR-pYghde52LBU,32603
+ databricks/labs/lakebridge/config.py,sha256=iu5SHkRO-aIFeZdi5P5UXEgJsjyWY-GKFGO3kR5i4-s,5842
+ databricks/labs/lakebridge/install.py,sha256=ZaWKELr-s0Ah386GCQjCSyWE-1CAn3MM2tBJTsTHlPQ,36595
  databricks/labs/lakebridge/jvmproxy.py,sha256=F9pXpemzdaJXwpshHxVM9PYU_eNn4zTCUFQ5vc9WIhA,1573
  databricks/labs/lakebridge/lineage.py,sha256=Q2oky4RkODRHWMwIQIwbYXSdZTmRkMWwEh6RssBiQxY,1843
  databricks/labs/lakebridge/uninstall.py,sha256=hf36YgeW9XO2cRvvn6AXUZdihQ1ZMHnR38OVEF5sfRw,759
@@ -25,7 +25,7 @@ databricks/labs/lakebridge/connections/credential_manager.py,sha256=N8wKec2UO5P1
  databricks/labs/lakebridge/connections/database_manager.py,sha256=RLu9aUweko_sg11lVBU_PMRjaYBwv9JdDJAV77EuWZU,3073
  databricks/labs/lakebridge/connections/env_getter.py,sha256=HKczv9Qgyd7Bm3OiKFuE0wqwXNZ-NqkCiVA_k0H_y6s,322
  databricks/labs/lakebridge/contexts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- databricks/labs/lakebridge/contexts/application.py,sha256=APlJ6ZQSLvMe-_G0TYwSBxN45sUXq9h-WyC5HCEX6Ic,4844
+ databricks/labs/lakebridge/contexts/application.py,sha256=MDOhsZnGx65UTpoEWus3mkSdHKRtxG423nwF8-JSgeE,4880
  databricks/labs/lakebridge/coverage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/labs/lakebridge/coverage/commons.py,sha256=WskVVa5l7aLVfSJgceCiDnfDsvJXwnb9eg27lMEj-HY,7515
  databricks/labs/lakebridge/coverage/lakebridge_snow_transpilation_coverage.py,sha256=QL92ei_5QIsuSSDmjpQ2qW5CN_9l08HpufedDjc1D-w,1085
@@ -34,8 +34,8 @@ databricks/labs/lakebridge/coverage/sqlglot_snow_transpilation_coverage.py,sha25
  databricks/labs/lakebridge/coverage/sqlglot_tsql_transpilation_coverage.py,sha256=9SypNpibaS9VGiTYVwCcMMmVWkBGj094cTBfbhsqZxQ,183
  databricks/labs/lakebridge/deployment/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/labs/lakebridge/deployment/configurator.py,sha256=uOowc6cJfpZl4LGAs0eSFBBhrhC_t6Zs8Uo17htxXzY,8859
- databricks/labs/lakebridge/deployment/dashboard.py,sha256=mFQFuw9YR8vcabYxCoZkgo247gttMaVokN8kYrJwz-Q,6002
- databricks/labs/lakebridge/deployment/installation.py,sha256=qGpvceRGi1AVST2NEtLWsneI83mKpXEZfTn812VdMjI,5072
+ databricks/labs/lakebridge/deployment/dashboard.py,sha256=6q41Jplt6gh3vtjXCKB7g-RjA9wm0ugeqQVWWN7WEWw,6119
+ databricks/labs/lakebridge/deployment/installation.py,sha256=vQ6OjIjFh4ldSjChz93fhrAefQX3_VqyhrRemgBmMM4,5130
  databricks/labs/lakebridge/deployment/job.py,sha256=J0zZao279P0qBH7idaREvcT-SjPsEGOL7mBgQ_ZTaZI,6090
  databricks/labs/lakebridge/deployment/recon.py,sha256=gRwuYE8GEg69rnYHv5IPOOwqdhZ2UPfwm0fOSFTL7pE,6258
  databricks/labs/lakebridge/deployment/table.py,sha256=15pKlGRtGDiErq2lY3V-vMbbiKHSF4U-U3S6WvHeOA0,976
@@ -147,11 +147,12 @@ databricks/labs/lakebridge/resources/reconcile/queries/installation/details.sql,
  databricks/labs/lakebridge/resources/reconcile/queries/installation/main.sql,sha256=s_A0YyGSX_pCWnQsQnY65VYFcbNvq2qKJvYxU6zam6E,794
  databricks/labs/lakebridge/resources/reconcile/queries/installation/metrics.sql,sha256=FdvjQp7gCwsbcu4UrOuJN-bBLJFpvUIyxH6PQvg04Wo,1006
  databricks/labs/lakebridge/transpiler/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- databricks/labs/lakebridge/transpiler/execute.py,sha256=7DpeIixATOPryyt4TD93-sdwE1C_fIwuo6bKwClaF_s,17007
+ databricks/labs/lakebridge/transpiler/execute.py,sha256=AgsOGQtmPfCoWYP9U1cYezY14UjGx6TPR_PU1NAY6sE,17268
+ databricks/labs/lakebridge/transpiler/repository.py,sha256=Iqe8Msf5_Boi2usel5NkWdsakk5rOYhIuapFHz--7cI,5066
  databricks/labs/lakebridge/transpiler/transpile_engine.py,sha256=5zC8fkpBBlt9RjE_BeA_Sd6vaRxA3mBdhTqoRGFTc_Y,1616
  databricks/labs/lakebridge/transpiler/transpile_status.py,sha256=MO-Ju-ki3FCY15WxgwfPV9EC7Ma9q8aIfSTgHAmnkGU,1715
  databricks/labs/lakebridge/transpiler/lsp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- databricks/labs/lakebridge/transpiler/lsp/lsp_engine.py,sha256=osT4RXpYqBNcAQ8mcoFt8m2dygs5TcmYnQq57KN_kw4,22580
+ databricks/labs/lakebridge/transpiler/lsp/lsp_engine.py,sha256=3itkrYWYZdE6NWXz7L_iy6gg8yuS2BV7X4AptvppcIo,22613
  databricks/labs/lakebridge/transpiler/sqlglot/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/labs/lakebridge/transpiler/sqlglot/dialect_utils.py,sha256=GhXXWGA_2PlmHKjxrjryZpA5xaVZ81Vrw3b7DzjpFFI,1033
  databricks/labs/lakebridge/transpiler/sqlglot/lca_utils.py,sha256=vpDLGhE-wFMah1VTXkMg6gI_QnzdzpYZf0h9DUd8zcI,5154
@@ -165,9 +166,9 @@ databricks/labs/lakebridge/transpiler/sqlglot/parsers/presto.py,sha256=bY6Ku8ZPW
  databricks/labs/lakebridge/transpiler/sqlglot/parsers/snowflake.py,sha256=dZ7BdOlBZlkbiN9G9bu4l2c456265Gx9WoWUPRa7Ffg,23203
  databricks/labs/lakebridge/upgrades/v0.4.0_add_main_table_operation_name_column.py,sha256=wMTbj1q5td4fa5DCk0tWFJ-OmhhzsExRLYUe4PKmk0s,3527
  databricks/labs/lakebridge/upgrades/v0.6.0_alter_metrics_datatype.py,sha256=hnTHRtqzwPSF5Judzh6ss-uB5h3IFtm2ylWduwRNq5Y,2424
- databricks_labs_lakebridge-0.10.4.dist-info/METADATA,sha256=DPgVGYnjOrTddEMWZRX2jXPHhHw6WBZJXviKtE5SQ14,3078
- databricks_labs_lakebridge-0.10.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- databricks_labs_lakebridge-0.10.4.dist-info/entry_points.txt,sha256=Idr1CT73b8wShdr287yu1hheGbDbhBvucVUlZcbpiPo,75
- databricks_labs_lakebridge-0.10.4.dist-info/licenses/LICENSE,sha256=1hG0Cvw6mp9nL9qRoHFcCUk9fYqhcnj2vgJ75rt3BxA,3862
- databricks_labs_lakebridge-0.10.4.dist-info/licenses/NOTICE,sha256=wtxMsNvTkw1hAEkkWHz8A8JrYySAUSt1tOTcqddkWEg,1797
- databricks_labs_lakebridge-0.10.4.dist-info/RECORD,,
+ databricks_labs_lakebridge-0.10.6.dist-info/METADATA,sha256=mb2Jz0eC_Yds9f-Acyl1mYA-vl5RNz2ZTIBRAAjMD2Y,3078
+ databricks_labs_lakebridge-0.10.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ databricks_labs_lakebridge-0.10.6.dist-info/entry_points.txt,sha256=Idr1CT73b8wShdr287yu1hheGbDbhBvucVUlZcbpiPo,75
+ databricks_labs_lakebridge-0.10.6.dist-info/licenses/LICENSE,sha256=1hG0Cvw6mp9nL9qRoHFcCUk9fYqhcnj2vgJ75rt3BxA,3862
+ databricks_labs_lakebridge-0.10.6.dist-info/licenses/NOTICE,sha256=wtxMsNvTkw1hAEkkWHz8A8JrYySAUSt1tOTcqddkWEg,1797
+ databricks_labs_lakebridge-0.10.6.dist-info/RECORD,,