databricks-labs-lakebridge 0.10.3-py3-none-any.whl → 0.10.4-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
databricks/labs/lakebridge/__about__.py
@@ -1,2 +1,2 @@
  # DO NOT MODIFY THIS FILE
- __version__ = "0.10.3"
+ __version__ = "0.10.4"

databricks/labs/lakebridge/cli.py
@@ -132,7 +132,7 @@ def transpile(
      logger.debug(f"Final configuration for transpilation: {config!r}")
 
      assert config.source_dialect is not None, "Source dialect has been validated by this point."
-     with_user_agent_extra("transpiler_source_tech", config.source_dialect)
+     with_user_agent_extra("transpiler_source_tech", make_alphanum_or_semver(config.source_dialect))
      plugin_name = engine.transpiler_name
      plugin_name = re.sub(r"\s+", "_", plugin_name)
      with_user_agent_extra("transpiler_plugin_name", plugin_name)

databricks/labs/lakebridge/cli.py
@@ -647,19 +647,23 @@ def configure_reconcile(w: WorkspaceClient):
 
 
  @lakebridge.command()
- def analyze(w: WorkspaceClient, source_directory: str, report_file: str):
+ def analyze(w: WorkspaceClient, source_directory: str, report_file: str, source_tech: str | None = None):
      """Run the Analyzer"""
      with_user_agent_extra("cmd", "analyze")
      ctx = ApplicationContext(w)
      prompts = ctx.prompts
      output_file = report_file
      input_folder = source_directory
-     source_tech = prompts.choice("Select the source technology", Analyzer.supported_source_technologies())
+     if source_tech is None:
+         source_tech = prompts.choice("Select the source technology", Analyzer.supported_source_technologies())
      with_user_agent_extra("analyzer_source_tech", make_alphanum_or_semver(source_tech))
      user = ctx.current_user
      logger.debug(f"User: {user}")
      is_debug = logger.getEffectiveLevel() == logging.DEBUG
      Analyzer.analyze(Path(input_folder), Path(output_file), source_tech, is_debug=is_debug)
+     logger.info(
+         f"Successfully Analyzed files in ${source_directory} for ${source_tech} and saved report to {report_file}"
+     )
 
 
  if __name__ == "__main__":
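
The analyze command now takes an optional source_tech argument: when it is provided, the interactive prompts.choice step is skipped, so the analyzer can run non-interactively (for example from scripts or CI), and a new logger.info line reports where the analysis report was written. A minimal usage sketch, assuming the decorated function can still be imported and called directly and that WorkspaceClient authentication is already configured; the "synapse" value is a placeholder and must be one of Analyzer.supported_source_technologies():

    from databricks.sdk import WorkspaceClient
    from databricks.labs.lakebridge.cli import analyze

    w = WorkspaceClient()  # assumes default Databricks auth (profile or env vars) is set up

    # As before: without source_tech, the command prompts for the source technology.
    analyze(w, source_directory="./sql", report_file="analysis-report.xlsx")

    # New in 0.10.4: supply the technology up front to skip the prompt.
    analyze(w, source_directory="./sql", report_file="analysis-report.xlsx", source_tech="synapse")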

databricks/labs/lakebridge/install.py
@@ -182,22 +182,6 @@ class WheelInstaller(TranspilerInstaller):
              logger.error(f"Error while fetching PyPI metadata: {product_name}", exc_info=e)
              return None
 
-     @classmethod
-     def download_artifact_from_pypi(cls, product_name: str, version: str, target: Path, extension="whl") -> int:
-         suffix = "-py3-none-any.whl" if extension == "whl" else ".tar.gz" if extension == "tar" else f".{extension}"
-         filename = f"{product_name.replace('-', '_')}-{version}{suffix}"
-         url = f"https://pypi.debian.net/{product_name}/{filename}"
-         try:
-             path, _ = request.urlretrieve(url)
-             logger.info(f"Successfully downloaded {path}")
-             if not target.exists():
-                 logger.info(f"Moving {path} to {target!s}")
-                 move(path, target)
-             return 0
-         except URLError as e:
-             logger.error("While downloading from pypi", exc_info=e)
-             return -1
- 
      def __init__(self, product_name: str, pypi_name: str, artifact: Path | None = None):
          self._product_name = product_name
          self._pypi_name = pypi_name

databricks_labs_lakebridge-0.10.3.dist-info/METADATA → databricks_labs_lakebridge-0.10.4.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: databricks-labs-lakebridge
- Version: 0.10.3
+ Version: 0.10.4
  Summary: Fast and predictable migrations to Databricks Lakehouse Platform. This tool is designed to help you migrate your data and workloads to the Databricks Lakehouse Platform in a fast, predictable, and reliable way. It provides a set of tools and utilities to help you reconcile your data and workloads, assess your current state, and plan your migration.
  Project-URL: Documentation, https://databrickslabs.github.io/lakebridge
  Project-URL: Issues, https://github.com/databrickslabs/lakebridge/issues

databricks_labs_lakebridge-0.10.3.dist-info/RECORD → databricks_labs_lakebridge-0.10.4.dist-info/RECORD
@@ -8,12 +8,12 @@ docs/lakebridge/src/theme/Footer/index.tsx,sha256=Jj8zY5WDiTLXwF_mAgld8Dh1A3MY1H
  docs/lakebridge/src/theme/Layout/index.tsx,sha256=IkdLr13jKmLxT0jWQqrwqrjVXc8Rwd_kWNpTd1t2sc0,592
  databricks/__init__.py,sha256=YqH8Hy8lHJxd0hLMZF6kWirUDdPiX90LRDX6S6yTMn0,261
  databricks/labs/__init__.py,sha256=YqH8Hy8lHJxd0hLMZF6kWirUDdPiX90LRDX6S6yTMn0,261
- databricks/labs/lakebridge/__about__.py,sha256=LBCN0OI_6vUqxgIo75HVdQc1TP5LOmy5HIs3OKrDIpk,49
+ databricks/labs/lakebridge/__about__.py,sha256=YbaA9ccBU-BwJ67El953ndAg5kViw0qZOkui5tbsV8c,49
  databricks/labs/lakebridge/__init__.py,sha256=nUNECqNvyfpT0aeWwlqG0ADT8U8ScCLb8WWpLydppcA,464
  databricks/labs/lakebridge/base_install.py,sha256=8NxXsNpgqXnuADKXVFh5oQL3osdvygRMY1amJwKfU08,490
- databricks/labs/lakebridge/cli.py,sha256=6exPUJs7c2qVo-X9VXFg5VM3XqOCdlk0_5OXfPw6nbY,31578
+ databricks/labs/lakebridge/cli.py,sha256=zeibf44Sfu6gjL5lLGnKTXle4kIVBy4hHDuITVL5TxY,31805
  databricks/labs/lakebridge/config.py,sha256=IjxvphM9fRQHQ2FAxwZ23deJGgSemJ3rMV0sp1Ob6e8,5833
- databricks/labs/lakebridge/install.py,sha256=EmtzbC-pOeiK7lqn4wxSRoeODlkqB_lQBJ9Mj4E0kjE,40536
+ databricks/labs/lakebridge/install.py,sha256=x8YQwX-EwSKGYKHeotzUwADfzBrvFeplDHbjifLq9mA,39757
  databricks/labs/lakebridge/jvmproxy.py,sha256=F9pXpemzdaJXwpshHxVM9PYU_eNn4zTCUFQ5vc9WIhA,1573
  databricks/labs/lakebridge/lineage.py,sha256=Q2oky4RkODRHWMwIQIwbYXSdZTmRkMWwEh6RssBiQxY,1843
  databricks/labs/lakebridge/uninstall.py,sha256=hf36YgeW9XO2cRvvn6AXUZdihQ1ZMHnR38OVEF5sfRw,759
@@ -165,9 +165,9 @@ databricks/labs/lakebridge/transpiler/sqlglot/parsers/presto.py,sha256=bY6Ku8ZPW
  databricks/labs/lakebridge/transpiler/sqlglot/parsers/snowflake.py,sha256=dZ7BdOlBZlkbiN9G9bu4l2c456265Gx9WoWUPRa7Ffg,23203
  databricks/labs/lakebridge/upgrades/v0.4.0_add_main_table_operation_name_column.py,sha256=wMTbj1q5td4fa5DCk0tWFJ-OmhhzsExRLYUe4PKmk0s,3527
  databricks/labs/lakebridge/upgrades/v0.6.0_alter_metrics_datatype.py,sha256=hnTHRtqzwPSF5Judzh6ss-uB5h3IFtm2ylWduwRNq5Y,2424
- databricks_labs_lakebridge-0.10.3.dist-info/METADATA,sha256=e7yr--8po1oLKE8_BRRTFbv_y2fJ0Fw5F95wT2bnn8U,3078
- databricks_labs_lakebridge-0.10.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- databricks_labs_lakebridge-0.10.3.dist-info/entry_points.txt,sha256=Idr1CT73b8wShdr287yu1hheGbDbhBvucVUlZcbpiPo,75
- databricks_labs_lakebridge-0.10.3.dist-info/licenses/LICENSE,sha256=1hG0Cvw6mp9nL9qRoHFcCUk9fYqhcnj2vgJ75rt3BxA,3862
- databricks_labs_lakebridge-0.10.3.dist-info/licenses/NOTICE,sha256=wtxMsNvTkw1hAEkkWHz8A8JrYySAUSt1tOTcqddkWEg,1797
- databricks_labs_lakebridge-0.10.3.dist-info/RECORD,,
+ databricks_labs_lakebridge-0.10.4.dist-info/METADATA,sha256=DPgVGYnjOrTddEMWZRX2jXPHhHw6WBZJXviKtE5SQ14,3078
+ databricks_labs_lakebridge-0.10.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ databricks_labs_lakebridge-0.10.4.dist-info/entry_points.txt,sha256=Idr1CT73b8wShdr287yu1hheGbDbhBvucVUlZcbpiPo,75
+ databricks_labs_lakebridge-0.10.4.dist-info/licenses/LICENSE,sha256=1hG0Cvw6mp9nL9qRoHFcCUk9fYqhcnj2vgJ75rt3BxA,3862
+ databricks_labs_lakebridge-0.10.4.dist-info/licenses/NOTICE,sha256=wtxMsNvTkw1hAEkkWHz8A8JrYySAUSt1tOTcqddkWEg,1797
+ databricks_labs_lakebridge-0.10.4.dist-info/RECORD,,