ygg 0.1.44__tar.gz → 0.1.45__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. {ygg-0.1.44 → ygg-0.1.45}/PKG-INFO +1 -1
  2. {ygg-0.1.44 → ygg-0.1.45}/pyproject.toml +1 -1
  3. {ygg-0.1.44 → ygg-0.1.45}/src/ygg.egg-info/PKG-INFO +1 -1
  4. {ygg-0.1.44 → ygg-0.1.45}/src/ygg.egg-info/SOURCES.txt +1 -0
  5. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/compute/cluster.py +20 -16
  6. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/compute/execution_context.py +35 -50
  7. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/sql/engine.py +5 -2
  8. ygg-0.1.45/src/yggdrasil/databricks/sql/warehouse.py +355 -0
  9. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/workspaces/workspace.py +19 -6
  10. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/pyutils/callable_serde.py +183 -281
  11. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/pyutils/expiring_dict.py +114 -25
  12. ygg-0.1.45/src/yggdrasil/version.py +1 -0
  13. ygg-0.1.44/src/yggdrasil/version.py +0 -1
  14. {ygg-0.1.44 → ygg-0.1.45}/LICENSE +0 -0
  15. {ygg-0.1.44 → ygg-0.1.45}/README.md +0 -0
  16. {ygg-0.1.44 → ygg-0.1.45}/setup.cfg +0 -0
  17. {ygg-0.1.44 → ygg-0.1.45}/src/ygg.egg-info/dependency_links.txt +0 -0
  18. {ygg-0.1.44 → ygg-0.1.45}/src/ygg.egg-info/entry_points.txt +0 -0
  19. {ygg-0.1.44 → ygg-0.1.45}/src/ygg.egg-info/requires.txt +0 -0
  20. {ygg-0.1.44 → ygg-0.1.45}/src/ygg.egg-info/top_level.txt +0 -0
  21. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/__init__.py +0 -0
  22. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/__init__.py +0 -0
  23. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/compute/__init__.py +0 -0
  24. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/compute/remote.py +0 -0
  25. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/jobs/__init__.py +0 -0
  26. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/jobs/config.py +0 -0
  27. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/sql/__init__.py +0 -0
  28. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/sql/exceptions.py +0 -0
  29. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/sql/statement_result.py +0 -0
  30. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/sql/types.py +0 -0
  31. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/workspaces/__init__.py +0 -0
  32. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/workspaces/filesytem.py +0 -0
  33. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/workspaces/io.py +0 -0
  34. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/workspaces/path.py +0 -0
  35. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/databricks/workspaces/path_kind.py +0 -0
  36. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/dataclasses/__init__.py +0 -0
  37. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/dataclasses/dataclass.py +0 -0
  38. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/libs/__init__.py +0 -0
  39. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/libs/databrickslib.py +0 -0
  40. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/libs/extensions/__init__.py +0 -0
  41. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/libs/extensions/polars_extensions.py +0 -0
  42. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/libs/extensions/spark_extensions.py +0 -0
  43. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/libs/pandaslib.py +0 -0
  44. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/libs/polarslib.py +0 -0
  45. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/libs/sparklib.py +0 -0
  46. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/pyutils/__init__.py +0 -0
  47. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/pyutils/equality.py +0 -0
  48. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/pyutils/exceptions.py +0 -0
  49. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/pyutils/modules.py +0 -0
  50. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/pyutils/parallel.py +0 -0
  51. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/pyutils/python_env.py +0 -0
  52. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/pyutils/retry.py +0 -0
  53. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/requests/__init__.py +0 -0
  54. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/requests/msal.py +0 -0
  55. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/requests/session.py +0 -0
  56. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/__init__.py +0 -0
  57. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/__init__.py +0 -0
  58. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/arrow_cast.py +0 -0
  59. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/cast_options.py +0 -0
  60. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/pandas_cast.py +0 -0
  61. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/polars_cast.py +0 -0
  62. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/polars_pandas_cast.py +0 -0
  63. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/registry.py +0 -0
  64. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/spark_cast.py +0 -0
  65. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/spark_pandas_cast.py +0 -0
  66. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/cast/spark_polars_cast.py +0 -0
  67. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/libs.py +0 -0
  68. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/python_arrow.py +0 -0
  69. {ygg-0.1.44 → ygg-0.1.45}/src/yggdrasil/types/python_defaults.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ygg
- Version: 0.1.44
+ Version: 0.1.45
  Summary: Type-friendly utilities for moving data between Python objects, Arrow, Polars, Pandas, Spark, and Databricks
  Author: Yggdrasil contributors
  License: Apache License
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "ygg"
- version = "0.1.44"
+ version = "0.1.45"
  description = "Type-friendly utilities for moving data between Python objects, Arrow, Polars, Pandas, Spark, and Databricks"
  readme = { file = "README.md", content-type = "text/markdown" }
  license = { file = "LICENSE" }
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ygg
- Version: 0.1.44
+ Version: 0.1.45
  Summary: Type-friendly utilities for moving data between Python objects, Arrow, Polars, Pandas, Spark, and Databricks
  Author: Yggdrasil contributors
  License: Apache License
@@ -21,6 +21,7 @@ src/yggdrasil/databricks/sql/engine.py
  src/yggdrasil/databricks/sql/exceptions.py
  src/yggdrasil/databricks/sql/statement_result.py
  src/yggdrasil/databricks/sql/types.py
+ src/yggdrasil/databricks/sql/warehouse.py
  src/yggdrasil/databricks/workspaces/__init__.py
  src/yggdrasil/databricks/workspaces/filesytem.py
  src/yggdrasil/databricks/workspaces/io.py
@@ -22,7 +22,7 @@ from typing import Any, Iterator, Optional, Union, List, Callable, Dict, ClassVa

  from .execution_context import ExecutionContext
  from ..workspaces.workspace import WorkspaceService, Workspace
- from ... import retry, CallableSerde
+ from ... import CallableSerde
  from ...libs.databrickslib import databricks_sdk
  from ...pyutils.equality import dicts_equal, dict_diff
  from ...pyutils.expiring_dict import ExpiringDict
@@ -47,6 +47,7 @@ else: # pragma: no cover - runtime fallback when SDK is missing
  __all__ = ["Cluster"]


+ LOGGER = logging.getLogger(__name__)
  NAME_ID_CACHE: dict[str, ExpiringDict] = {}


@@ -72,9 +73,6 @@ def get_cached_cluster_id(
  return existing.get(cluster_name) if existing else None


- logger = logging.getLogger(__name__)
-
-
  # module-level mapping Databricks Runtime -> (major, minor) Python version
  _PYTHON_BY_DBR: dict[str, tuple[int, int]] = {
  "10.4": (3, 8),
@@ -363,7 +361,8 @@ class Cluster(WorkspaceService):
  tick: float = 0.5,
  timeout: Union[float, dt.timedelta] = 600,
  backoff: int = 2,
- max_sleep_time: float = 15
+ max_sleep_time: float = 15,
+ wait_libraries: bool = True
  ):
  """Wait for the cluster to exit pending states.

@@ -372,6 +371,7 @@ class Cluster(WorkspaceService):
  timeout: Max seconds to wait before timing out.
  backoff: Backoff multiplier for the sleep interval.
  max_sleep_time: Maximum sleep interval in seconds.
+ wait_libraries: Whether to also wait for library installation to complete.

  Returns:
  The current Cluster instance.
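
Together, tick, backoff, and max_sleep_time drive a capped exponential backoff poll; the next hunk shows the library's own update line, sleep_time = min(max_sleep_time, sleep_time * backoff). A self-contained sketch of the pattern, where is_pending is a placeholder predicate rather than anything from the SDK:

    import time

    def wait_until_settled(
        is_pending,                    # callable: True while the resource is still pending
        tick: float = 0.5,             # initial sleep between polls
        timeout: float = 600.0,        # overall deadline in seconds
        backoff: float = 2.0,          # multiplier applied after each poll
        max_sleep_time: float = 15.0,  # cap on any single sleep
    ) -> None:
        deadline = time.monotonic() + timeout
        sleep_time = tick
        while is_pending():
            if time.monotonic() >= deadline:
                raise TimeoutError(f"still pending after {timeout:.0f}s")
            time.sleep(sleep_time)
            # grow the interval geometrically, but never beyond the cap
            sleep_time = min(max_sleep_time, sleep_time * backoff)
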
@@ -390,7 +390,8 @@ class Cluster(WorkspaceService):

  sleep_time = min(max_sleep_time, sleep_time * backoff)

- self.wait_installed_libraries()
+ if wait_libraries:
+ self.wait_installed_libraries()

  self.raise_for_status()

@@ -638,7 +639,7 @@ class Cluster(WorkspaceService):
  if k in _CREATE_ARG_NAMES
  }

- logger.debug(
+ LOGGER.debug(
  "Creating Databricks cluster %s with %s",
  update_details["cluster_name"],
  update_details,
@@ -646,7 +647,7 @@ class Cluster(WorkspaceService):

  self.details = self.clusters_client().create_and_wait(**update_details)

- logger.info(
+ LOGGER.info(
  "Created %s",
  self
  )
@@ -699,7 +700,7 @@ class Cluster(WorkspaceService):
  for k, v in dict_diff(existing_details, update_details, keys=_EDIT_ARG_NAMES).items()
  }

- logger.debug(
+ LOGGER.debug(
  "Updating %s with %s",
  self, diff
  )
@@ -707,7 +708,7 @@ class Cluster(WorkspaceService):
  self.wait_for_status()
  self.clusters_client().edit(**update_details)

- logger.info(
+ LOGGER.info(
  "Updated %s",
  self
  )
@@ -811,7 +812,7 @@ class Cluster(WorkspaceService):
  self.wait_for_status()

  if not self.is_running:
- logger.info("Starting %s", self)
+ LOGGER.debug("Starting %s", self)

  if wait_timeout:
  self.clusters_client().start(cluster_id=self.cluster_id)
@@ -819,6 +820,8 @@ class Cluster(WorkspaceService):
  else:
  self.clusters_client().start(cluster_id=self.cluster_id)

+ LOGGER.info("Started %s", self)
+
  return self

  def restart(
@@ -832,7 +835,6 @@ class Cluster(WorkspaceService):
  self.wait_for_status()

  if self.is_running:
- logger.info("Restarting %s", self)
  self.details = self.clusters_client().restart_and_wait(cluster_id=self.cluster_id)
  return self.wait_for_status()

@@ -846,8 +848,10 @@ class Cluster(WorkspaceService):
  Returns:
  The SDK delete response.
  """
- logger.info("Deleting %s", self)
- return self.clusters_client().delete(cluster_id=self.cluster_id)
+ if self.cluster_id:
+ LOGGER.debug("Deleting %s", self)
+ self.clusters_client().delete(cluster_id=self.cluster_id)
+ LOGGER.info("Deleted %s", self)

  def context(
  self,
@@ -1150,7 +1154,7 @@ class Cluster(WorkspaceService):
  if raise_error:
  raise DatabricksError("Libraries %s in %s failed to install" % (failed, self))

- logger.warning(
+ LOGGER.exception(
  "Libraries %s in %s failed to install",
  failed, self
  )
@@ -1187,7 +1191,7 @@ class Cluster(WorkspaceService):
  Returns:
  The uploaded library argument(s).
  """
- return self.context().install_temporary_libraries(libraries=libraries)
+ return self.system_context.install_temporary_libraries(libraries=libraries)

  def _check_library(
  self,
@@ -17,6 +17,7 @@ from typing import TYPE_CHECKING, Optional, Any, Callable, List, Dict, Union, It

  from ...libs.databrickslib import databricks_sdk
  from ...pyutils.exceptions import raise_parsed_traceback
+ from ...pyutils.expiring_dict import ExpiringDict
  from ...pyutils.modules import resolve_local_lib_path
  from ...pyutils.callable_serde import CallableSerde

@@ -30,7 +31,7 @@ __all__ = [
  "ExecutionContext"
  ]

- logger = logging.getLogger(__name__)
+ LOGGER = logging.getLogger(__name__)


  @dc.dataclass
@@ -38,7 +39,6 @@ class RemoteMetadata:
  """Metadata describing the remote cluster execution environment."""
  site_packages_path: Optional[str] = dc.field(default=None)
  os_env: Dict[str, str] = dc.field(default_factory=dict)
- requirements: Optional[str] = dc.field(default=None)
  version_info: Tuple[int, int, int] = dc.field(default=(0, 0, 0))

  def os_env_diff(
@@ -80,6 +80,7 @@ class ExecutionContext:

  _was_connected: Optional[bool] = dc.field(default=None, repr=False)
  _remote_metadata: Optional[RemoteMetadata] = dc.field(default=None, repr=False)
+ _uploaded_package_roots: Optional[ExpiringDict] = dc.field(default_factory=ExpiringDict, repr=False)

  _lock: threading.RLock = dc.field(default_factory=threading.RLock, init=False, repr=False)

@@ -127,9 +128,7 @@
  with self._lock:
  # double-check after acquiring lock
  if self._remote_metadata is None:
- cmd = r"""import glob
- import json
- import os
+ cmd = r"""import glob, json, os
  from yggdrasil.pyutils.python_env import PythonEnv

  current_env = PythonEnv.get_current()
@@ -144,7 +143,6 @@ os_env = meta["os_env"] = {}
  for k, v in os.environ.items():
  os_env[k] = v

- meta["requirements"] = current_env.requirements()
  meta["version_info"] = current_env.version_info

  print(json.dumps(meta))"""
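
The metadata probe above runs a short script on the cluster and ships the result home by printing a single JSON line to stdout. A local sketch of that transport, with a subprocess standing in for the remote interpreter (on Databricks, the command-execution API plays that role):

    import json
    import subprocess
    import sys

    # The "remote" snippet: gather metadata, emit it as one JSON line.
    SNIPPET = r"""
    import json, os, sys
    meta = {
        "os_env": dict(os.environ),
        "version_info": list(sys.version_info[:3]),
    }
    print(json.dumps(meta))
    """

    proc = subprocess.run(
        [sys.executable, "-c", SNIPPET],
        capture_output=True, text=True, check=True,
    )
    meta = json.loads(proc.stdout)  # parse the JSON the snippet printed
    print(meta["version_info"])
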
@@ -191,7 +189,7 @@ print(json.dumps(meta))"""
  """
  self.cluster.ensure_running()

- logger.debug(
+ LOGGER.debug(
  "Creating Databricks command execution context for %s",
  self.cluster
  )
@@ -217,7 +215,7 @@ print(json.dumps(meta))"""
  The connected ExecutionContext instance.
  """
  if self.context_id is not None:
- logger.debug(
+ LOGGER.debug(
  "Execution context already open for %s",
  self
  )
@@ -235,7 +233,7 @@ print(json.dumps(meta))"""
  raise RuntimeError("Failed to create command execution context")

  self.context_id = context_id
- logger.info(
+ LOGGER.info(
  "Opened execution context for %s",
  self
  )
@@ -247,13 +245,9 @@ print(json.dumps(meta))"""
  Returns:
  None.
  """
- if self.context_id is None:
+ if not self.context_id:
  return

- logger.debug(
- "Closing execution context for %s",
- self
- )
  try:
  self._workspace_client().command_execution.destroy(
  cluster_id=self.cluster.cluster_id,
@@ -349,7 +343,7 @@ print(json.dumps(meta))"""

  self.connect(language=Language.PYTHON)

- logger.debug(
+ LOGGER.debug(
  "Executing callable %s with %s",
  getattr(func, "__name__", type(func)),
  self,
@@ -394,12 +388,18 @@ print(json.dumps(meta))"""
  module_name = module_name.split(".")[0]

  if module_name and "yggdrasil" not in module_name:
- self.close()
+ LOGGER.debug(
+ "Installing missing module %s from local environment",
+ module_name,
+ )

- self.cluster.install_libraries(
+ self.install_temporary_libraries(
  libraries=[module_name],
- raise_error=True,
- restart=True
+ )
+
+ LOGGER.warning(
+ "Installed missing module %s from local environment",
+ module_name,
  )

  return self.execute_callable(
@@ -412,6 +412,7 @@
  timeout=timeout,
  command=command,
  )
+
  raise remote_module_error

  return result
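
The recovery path above extracts the missing module's name from the remote ModuleNotFoundError message, pushes it to the cluster, and re-executes the callable. A standalone sketch of that loop; run and install are hypothetical stand-ins for the context's execute/install machinery, and this version deliberately retries at most once:

    import re

    _MOD_NOT_FOUND_RE = re.compile(r"No module named ['\"]([^'\"]+)['\"]")

    def run_with_module_retry(run, install, retried: bool = False):
        try:
            return run()
        except ModuleNotFoundError as err:
            match = _MOD_NOT_FOUND_RE.search(str(err))
            # keep only the top-level package, e.g. "pandas" from "pandas.core"
            module_name = match.group(1).split(".")[0] if match else None
            if retried or not module_name or "yggdrasil" in module_name:
                raise
            install(module_name)
            return run_with_module_retry(run, install, retried=True)
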
@@ -446,29 +447,7 @@ print(json.dumps(meta))"""
  timeout=timeout or dt.timedelta(minutes=20)
  )

- try:
- return self._decode_result(result, result_tag=result_tag, print_stdout=print_stdout)
- except ModuleNotFoundError as remote_module_error:
- _MOD_NOT_FOUND_RE = re.compile(r"No module named ['\"]([^'\"]+)['\"]")
- module_name = _MOD_NOT_FOUND_RE.search(str(remote_module_error))
- module_name = module_name.group(1) if module_name else None
- module_name = module_name.split(".")[0]
-
- if module_name and "yggdrasil" not in module_name:
- self.close()
- self.cluster.install_libraries(
- libraries=[module_name],
- raise_error=True,
- restart=True
- )
-
- return self.execute_command(
- command=command,
- timeout=timeout,
- result_tag=result_tag,
- print_stdout=print_stdout
- )
- raise remote_module_error
+ return self._decode_result(result, result_tag=result_tag, print_stdout=print_stdout)

  # ------------------------------------------------------------------
  # generic local → remote uploader, via remote python
@@ -589,16 +568,22 @@ with zipfile.ZipFile(buf, "r") as zf:
  ]

  resolved = resolve_local_lib_path(libraries)
+ str_resolved = str(resolved)
+ existing = self._uploaded_package_roots.get(str_resolved)

- remote_site_packages_path = self.remote_metadata.site_packages_path
- if resolved.is_dir():
- # site-packages/<package_name>/
- remote_target = posixpath.join(remote_site_packages_path, resolved.name)
- else:
- # site-packages/<module_file>
- remote_target = posixpath.join(remote_site_packages_path, resolved.name)
+ if not existing:
+ remote_site_packages_path = self.remote_metadata.site_packages_path
+
+ if resolved.is_dir():
+ # site-packages/<package_name>/
+ remote_target = posixpath.join(remote_site_packages_path, resolved.name)
+ else:
+ # site-packages/<module_file>
+ remote_target = posixpath.join(remote_site_packages_path, resolved.name)
+
+ self.upload_local_path(resolved, remote_target)

- self.upload_local_path(resolved, remote_target)
+ self._uploaded_package_roots[str_resolved] = remote_target

  return libraries
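
With _uploaded_package_roots in place, installing the same local package twice skips the second upload while the cache entry is alive. A minimal sketch of the memoization; a plain dict replaces the TTL-bound ExpiringDict, and upload stands in for upload_local_path:

    import posixpath
    from pathlib import Path

    _uploaded: dict[str, str] = {}  # resolved local path -> remote target

    def upload_once(resolved: Path, site_packages: str, upload) -> str:
        key = str(resolved)
        existing = _uploaded.get(key)
        if existing:
            return existing  # already shipped this session; skip the transfer
        # site-packages/<package_name>/ for dirs, site-packages/<module_file> otherwise
        remote_target = posixpath.join(site_packages, resolved.name)
        upload(resolved, remote_target)
        _uploaded[key] = remote_target
        return remote_target
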
@@ -130,7 +130,10 @@ class SQLEngine(WorkspaceService):
  schema_name = schema_name or self.schema_name
  return catalog_name, schema_name, table_name

- def _default_warehouse(self, cluster_size: str = "Small"):
+ def _default_warehouse(
+ self,
+ cluster_size: str = "Small"
+ ):
  """Pick a default SQL warehouse (best-effort) matching the desired size.

  Args:
@@ -951,7 +954,7 @@ FROM parquet.`{temp_volume_path}`"""
  """Convert an Arrow Field to a Databricks SQL column DDL fragment."""
  name = field.name
  nullable_str = " NOT NULL" if put_not_null and not field.nullable else ""
- name_str = f"{name} " if put_name else ""
+ name_str = f"`{name}` " if put_name else ""

  comment_str = ""
  if put_comment and field.metadata and b"comment" in field.metadata:
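
Backtick-quoting the column name keeps the generated DDL valid when a field is called, say, order date or select. A small illustration with pyarrow; the type mapping here is deliberately minimal and not the package's full cast logic:

    import pyarrow as pa

    def column_ddl(field: pa.Field) -> str:
        # map a few Arrow types to Databricks SQL types; default to STRING
        sql_type = {"int64": "BIGINT", "string": "STRING", "double": "DOUBLE"}.get(
            str(field.type), "STRING"
        )
        nullable_str = "" if field.nullable else " NOT NULL"
        return f"`{field.name}` {sql_type}{nullable_str}"

    print(column_ddl(pa.field("order date", pa.int64(), nullable=False)))
    # -> `order date` BIGINT NOT NULL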