dvc-databricks 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,17 @@
1
+ """
2
+ dvc-databricks — DVC remote plugin for Databricks Unity Catalog Volumes.
3
+
4
+ Registers the ``dbvol`` protocol into ``dvc_objects.fs.known_implementations``
5
+ so that DVC can resolve ``dbvol://`` remotes in any process where this
6
+ package is installed.
7
+
8
+ This registration runs on import. The package uses a .pth file (installed
9
+ into site-packages) to ensure this module is imported at Python startup,
10
+ which makes ``dvc push`` / ``dvc pull`` work from the CLI without any
11
+ manual imports.
12
+ """
13
+ from dvc_objects.fs import known_implementations
14
+
15
+ known_implementations["dbvol"] = {
16
+ "class": "dvc_databricks.filesystem.DatabricksVolumesFileSystem",
17
+ }
@@ -0,0 +1,546 @@
1
+ """
2
+ DVC filesystem plugin for Databricks Unity Catalog Volumes.
3
+
4
+ Architecture:
5
+
6
+ DatabricksVolumesFileSystem ← dvc_objects.FileSystem subclass
7
+ │ DVC-facing layer: config parsing,
8
+ │ checksum strategy, plugin registration
9
+
10
+ └── self.fs ← _DatabricksVolumesFS (fsspec.AbstractFileSystem)
11
+ I/O layer: upload, download, list, delete
12
+ via Databricks SDK Files API
13
+
14
+ When this package is installed, the ``dvc.plugins`` entry point registers
15
+ ``DatabricksVolumesFileSystem`` under the ``dbvol`` protocol. DVC discovers
16
+ it automatically — no imports or manual configuration required.
17
+
18
+ Users configure the remote once:
19
+
20
+ dvc remote add -d myremote dbvol:///Volumes/catalog/schema/volume/path
21
+ export DATABRICKS_CONFIG_PROFILE=<profile>
22
+
23
+ Then use standard DVC commands as usual:
24
+
25
+ dvc push / dvc pull / dvc status ...
26
+ """
27
+
28
+ from __future__ import annotations
29
+
30
+ import io
31
+ import logging
32
+ import os
33
+ import threading
34
+ from typing import ClassVar
35
+
36
+ from databricks.sdk import WorkspaceClient
37
+ from databricks.sdk.config import Config
38
+ from dvc_objects.fs.base import FileSystem
39
+ from fsspec import AbstractFileSystem
40
+
41
+ logger = logging.getLogger(__name__)
42
+
43
+
44
+ # ---------------------------------------------------------------------------
45
+ # Inner fsspec filesystem — handles raw I/O via Databricks SDK
46
+ # ---------------------------------------------------------------------------
47
+
48
+
49
class _DatabricksVolumesFS(AbstractFileSystem):
    """fsspec filesystem that routes all I/O through the Databricks SDK Files API.

    This is the I/O layer used internally by ``DatabricksVolumesFileSystem``.
    It is not intended to be used directly by end users.

    Args:
        profile: Databricks CLI profile name from ``~/.databrickscfg``.
            When ``None``, the SDK reads ``DATABRICKS_CONFIG_PROFILE`` from
            the environment, then falls back to the default profile.
        **storage_options: Additional options forwarded to
            ``AbstractFileSystem.__init__``.
    """

    protocol = "dbvol"

    def __init__(self, profile: str | None = None, **storage_options):
        super().__init__(**storage_options)

        resolved = profile or os.environ.get("DATABRICKS_CONFIG_PROFILE")
        cfg = Config(profile=resolved) if resolved else Config()
        self._client = WorkspaceClient(config=cfg)

    # ------------------------------------------------------------------
    # Path helpers
    # ------------------------------------------------------------------

    @classmethod
    def _strip_protocol(cls, path: str) -> str:
        """Remove the ``dbvol://`` prefix and ensure a leading slash.

        Args:
            path: Raw path, possibly prefixed with ``dbvol://``. Also accepts
                a list of paths, in which case each element is processed.

        Returns:
            Absolute path string starting with ``/``, or a list of such
            strings when the input is a list.
        """
        if isinstance(path, list):
            return [cls._strip_protocol(p) for p in path]

        path = super()._strip_protocol(path)

        if not path.startswith("/"):
            path = "/" + path

        return path

    # ------------------------------------------------------------------
    # Metadata operations
    # ------------------------------------------------------------------

    def ls(self, path: str, detail: bool = True, **kwargs):
        """List a directory or return a single-item list for a file.

        Tries the path as a directory first; falls back to ``info()`` if the
        directory listing raises an exception (i.e. path is a file).

        Args:
            path: Absolute Volume path to list.
            detail: If ``True``, return a list of dicts with keys
                ``name``, ``type``, ``size``, and ``last_modified``.
                If ``False``, return a list of path strings.
            **kwargs: Ignored; present for fsspec compatibility.

        Returns:
            List of dicts (when ``detail=True``) or list of path strings
            (when ``detail=False``).

        Raises:
            FileNotFoundError: If the path does not exist.
        """
        path = self._strip_protocol(path)

        try:
            entries = list(self._client.files.list_directory_contents(path))
            result = []

            for entry in entries:
                info = {
                    "name": entry.path,
                    "type": "directory" if entry.is_directory else "file",
                    "size": entry.file_size or 0,
                    "last_modified": entry.last_modified,
                }
                result.append(info if detail else entry.path)

            return result

        except Exception:
            # Path is not listable as a directory (likely a file, or the API
            # rejected the request); fall through to the single-file lookup.
            pass

        # Fall back to a single-file lookup via info() to avoid duplicating
        # the metadata retrieval logic.
        info = self.info(path)  # raises FileNotFoundError if path does not exist
        return [info] if detail else [info["name"]]

    def info(self, path: str, **kwargs) -> dict:
        """Return metadata for a single file or directory.

        Tries a file metadata lookup first (cheaper), then falls back to a
        directory metadata lookup.

        Args:
            path: Absolute Volume path.
            **kwargs: Ignored; present for fsspec compatibility.

        Returns:
            Dict with keys ``name`` (str), ``type`` (``"file"`` or
            ``"directory"``), and ``size`` (int, bytes).

        Raises:
            FileNotFoundError: If the path does not exist.
        """
        path = self._strip_protocol(path)

        try:
            meta = self._client.files.get_metadata(path)
            return {"name": path, "type": "file", "size": meta.content_length or 0}
        except Exception:
            pass

        try:
            self._client.files.get_directory_metadata(path)
            return {"name": path, "type": "directory", "size": 0}
        except Exception:
            # Suppress the SDK's internal exception context: callers only
            # need the standard "missing path" signal.
            raise FileNotFoundError(
                f"No such file or directory: {path!r}"
            ) from None

    def exists(self, path: str, **kwargs) -> bool:
        """Return ``True`` if *path* exists on the Volume, ``False`` otherwise.

        Args:
            path: Absolute Volume path to check.
            **kwargs: Ignored; present for fsspec compatibility.

        Returns:
            ``True`` if the path exists, ``False`` if not.
        """
        try:
            self.info(path)
            return True
        except FileNotFoundError:
            return False

    # ------------------------------------------------------------------
    # Directory operations
    # ------------------------------------------------------------------

    def mkdir(self, path: str, create_parents: bool = True, **kwargs):
        """Create a directory on the Volume.

        Args:
            path: Absolute Volume path for the new directory.
            create_parents: Ignored — the Databricks Files API always
                creates intermediate directories automatically.
            **kwargs: Ignored; present for fsspec compatibility.
        """
        path = self._strip_protocol(path)
        self._client.files.create_directory(path)

    def makedirs(self, path: str, exist_ok: bool = False):
        """Create a directory and all intermediate parents.

        Args:
            path: Absolute Volume path for the new directory.
            exist_ok: If ``False``, re-raises any exception thrown by the
                API. If ``True``, suppresses those exceptions.
        """
        path = self._strip_protocol(path)
        try:
            self._client.files.create_directory(path)
        except Exception:
            if not exist_ok:
                raise

    def rm_file(self, path: str):
        """Delete a single file from the Volume.

        Args:
            path: Absolute Volume path of the file to delete.
        """
        path = self._strip_protocol(path)
        self._client.files.delete(path)

    def rm(self, path, recursive: bool = False, **kwargs):
        """Delete one or more files or directories from the Volume.

        Args:
            path: Absolute Volume path (str) or list of paths to delete.
            recursive: If ``True``, recursively delete directory contents,
                then delete the directory itself.
            **kwargs: Ignored; present for fsspec compatibility.
        """
        paths = path if isinstance(path, list) else [path]

        for p in paths:
            p = self._strip_protocol(p)

            if recursive and self.isdir(p):
                for entry in self.ls(p, detail=True):
                    self.rm(entry["name"], recursive=True)

                # Bug fix: the original implementation deleted the contents
                # but left the (now empty) directory behind. The Files API
                # has a dedicated endpoint for directory deletion.
                self._client.files.delete_directory(p)
            else:
                self._client.files.delete(p)

    # ------------------------------------------------------------------
    # File I/O
    # ------------------------------------------------------------------

    def _open(self, path: str, mode: str = "rb", **kwargs):
        """Open a file on the Volume for reading or writing.

        For reads, the file is downloaded eagerly into a ``BytesIO`` buffer.
        For writes, a ``_WriteBuffer`` is returned which uploads on ``close()``.

        Args:
            path: Absolute Volume path.
            mode: ``"rb"`` for reading or ``"wb"`` for writing.
            **kwargs: Ignored; present for fsspec compatibility.

        Returns:
            A ``BytesIO`` instance (read mode) or a ``_WriteBuffer``
            instance (write mode).

        Raises:
            ValueError: If *mode* is neither ``"rb"`` nor ``"wb"``.
        """
        path = self._strip_protocol(path)

        if "r" in mode:
            response = self._client.files.download(path)
            return io.BytesIO(response.contents.read())

        if "w" in mode:
            return _WriteBuffer(self._client, path)

        raise ValueError(f"Unsupported mode: {mode!r}")

    def put_file(self, lpath: str, rpath: str, **kwargs):
        """Upload a single local file to the Volume.

        Args:
            lpath: Absolute local filesystem path of the source file.
            rpath: Absolute Volume path of the destination.
            **kwargs: Ignored; present for fsspec compatibility.
        """
        rpath = self._strip_protocol(rpath)

        with open(lpath, "rb") as fh:
            self._client.files.upload(rpath, fh, overwrite=True)

    def get_file(self, rpath: str, lpath: str, outfile=None, **kwargs):
        """Download a single file from the Volume to a local path.

        Args:
            rpath: Absolute Volume path of the source file.
            lpath: Absolute local filesystem path of the destination.
                Intermediate directories are created automatically.
            outfile: If provided, write the downloaded bytes into this
                file-like object instead of saving to *lpath*.
            **kwargs: Ignored; present for fsspec compatibility.
        """
        rpath = self._strip_protocol(rpath)
        response = self._client.files.download(rpath)

        if outfile is not None:
            outfile.write(response.contents.read())
        else:
            os.makedirs(os.path.dirname(os.path.abspath(lpath)), exist_ok=True)

            with open(lpath, "wb") as fh:
                fh.write(response.contents.read())
325
+
326
+
327
+ class _WriteBuffer(io.RawIOBase):
328
+ """Write-only in-memory buffer that uploads to Databricks on ``close()``.
329
+
330
+ fsspec's ``_open(mode="wb")`` contract expects a file-like object.
331
+ Because the Databricks Files API requires the full content to be
332
+ available at upload time (it is not a streaming multipart API), we
333
+ accumulate all ``write()`` calls in a ``BytesIO`` buffer and perform
334
+ a single ``files.upload()`` call when the buffer is closed.
335
+
336
+ The upload is triggered exactly once, either by an explicit ``close()``
337
+ call or when used as a context manager (``with fs.open(path, "wb") as f``).
338
+
339
+ Example:
340
+ >>> with fs.open("/Volumes/catalog/schema/vol/file.csv", "wb") as f:
341
+ ... f.write(b"col1,col2\\n1,2\\n")
342
+ """
343
+
344
+ def __init__(self, client, path: str):
345
+ """Initialize the buffer.
346
+
347
+ Args:
348
+ client: An authenticated ``WorkspaceClient`` instance.
349
+ path: Absolute Volume path where the file will be written,
350
+ e.g. ``/Volumes/catalog/schema/volume/subdir/file.csv``.
351
+ """
352
+ self._client = client
353
+ self._path = path
354
+ self._buf = io.BytesIO()
355
+
356
+ def write(self, data: bytes) -> int:
357
+ """Append *data* to the in-memory buffer.
358
+
359
+ No network call is made at this point.
360
+
361
+ Args:
362
+ data: Bytes to buffer.
363
+
364
+ Returns:
365
+ Number of bytes written.
366
+ """
367
+ return self._buf.write(data)
368
+
369
+ def close(self):
370
+ """Flush the buffer to the Databricks Volume and close the stream.
371
+
372
+ Performs a single ``files.upload()`` call with the accumulated
373
+ buffer contents. Subsequent calls are no-ops (guarded by
374
+ ``self.closed``).
375
+ """
376
+ if not self.closed:
377
+ self._buf.seek(0)
378
+ self._client.files.upload(self._path, self._buf, overwrite=True)
379
+
380
+ super().close()
381
+
382
+ def readable(self) -> bool:
383
+ """Return ``False`` — this stream is write-only.
384
+
385
+ Returns:
386
+ Always ``False``.
387
+ """
388
+ return False
389
+
390
+ def writable(self) -> bool:
391
+ """Return ``True`` — this stream accepts ``write()`` calls.
392
+
393
+ Returns:
394
+ Always ``True``.
395
+ """
396
+ return True
397
+
398
+ def seekable(self) -> bool:
399
+ """Return ``False`` — seeking is not supported on the public interface.
400
+
401
+ The internal ``BytesIO`` buffer is seeked internally by ``close()``
402
+ before uploading, but callers must not rely on seek support.
403
+
404
+ Returns:
405
+ Always ``False``.
406
+ """
407
+ return False
408
+
409
+ def __enter__(self):
410
+ """Return *self* to support usage as a context manager.
411
+
412
+ Returns:
413
+ This ``_WriteBuffer`` instance.
414
+ """
415
+ return self
416
+
417
+ def __exit__(self, *args):
418
+ """Close the buffer and trigger the upload on context manager exit.
419
+
420
+ Args:
421
+ *args: Exception info (type, value, traceback) — ignored.
422
+ """
423
+ self.close()
424
+
425
+
426
+ # ---------------------------------------------------------------------------
427
+ # DVC plugin — dvc_objects.FileSystem wrapper
428
+ # ---------------------------------------------------------------------------
429
+
430
+
431
class DatabricksVolumesFileSystem(FileSystem):
    """DVC remote filesystem backed by Databricks Unity Catalog Volumes.

    Extends ``dvc_objects.fs.base.FileSystem``.

    DVC delegates every storage operation to ``self.fs`` — a lazily created
    ``_DatabricksVolumesFS`` — which talks to the Volume through the
    Databricks SDK Files API (no direct S3 access).

    One-time setup per repo:

        dvc remote add -d myremote \\
            dbvol:///Volumes/catalog/schema/volume/dvc_cache
        export DATABRICKS_CONFIG_PROFILE=<profile>

    After that, plain ``dvc push`` / ``dvc pull`` / ``dvc status`` work
    without code changes.

    Note:
        ``DATABRICKS_CONFIG_PROFILE`` must come from the environment: DVC
        remotes do not support arbitrary config keys, so the profile cannot
        be stored in ``.dvc/config``.
    """

    protocol = "dbvol"
    PARAM_CHECKSUM = "md5"
    # Format: {"pip_package_name": "importable.module.name"}
    # dvc_objects calls find_spec() on the value to check the dep is installed.
    REQUIRES: ClassVar[dict[str, str]] = {"databricks-sdk": "databricks.sdk"}

    def __init__(self, **config):
        """Parse DVC remote config and prepare the filesystem.

        Args:
            **config: DVC remote configuration dict. Expected keys:

                - ``url`` (str): Full remote URL, e.g.
                  ``dbvol:///Volumes/catalog/schema/volume/path``.
                - ``profile`` (str, optional): Databricks CLI profile name.
                  Falls back to ``DATABRICKS_CONFIG_PROFILE`` env var.
        """
        super().__init__(**config)
        self.url = config["url"]
        configured_profile = config.get("profile")
        self._profile = configured_profile or os.environ.get(
            "DATABRICKS_CONFIG_PROFILE"
        )
        self._fs_instance: _DatabricksVolumesFS | None = None
        self._fs_lock = threading.RLock()

    @staticmethod
    def _get_kwargs_from_urls(urlpath: str) -> dict:
        """Map a remote URL to constructor kwargs.

        DVC calls this while parsing ``dvc remote add`` URLs; the URL is
        forwarded untouched so ``__init__`` receives it as ``config["url"]``.

        Args:
            urlpath: Full remote URL, e.g.
                ``dbvol:///Volumes/catalog/schema/volume/path``.

        Returns:
            Dict with a single ``url`` key.
        """
        return {"url": urlpath}

    @classmethod
    def _strip_protocol(cls, path: str) -> str:
        """Drop a leading ``dbvol://`` and guarantee an absolute path.

        Args:
            path: Raw path, possibly prefixed with ``dbvol://``. Lists are
                processed element-wise.

        Returns:
            Absolute path string starting with ``/``.
        """
        if isinstance(path, list):
            return [cls._strip_protocol(item) for item in path]

        bare = path.removeprefix("dbvol://")
        return bare if bare.startswith("/") else "/" + bare

    def unstrip_protocol(self, path: str) -> str:
        """Reconstruct the full ``dbvol://`` URL from an absolute path.

        Args:
            path: Absolute Volume path, e.g.
                ``/Volumes/catalog/schema/volume/file``.

        Returns:
            Full URL string, e.g.
            ``dbvol:///Volumes/catalog/schema/volume/file``.
        """
        return "dbvol://" + path

    @property
    def fs(self) -> _DatabricksVolumesFS:
        """Return the underlying fsspec filesystem, created lazily and cached.

        Thread-safe: the ``RLock`` guarantees a single instance even under
        concurrent first access.

        Returns:
            A ``_DatabricksVolumesFS`` authenticated with the configured
            Databricks profile.
        """
        with self._fs_lock:
            if self._fs_instance is None:
                self._fs_instance = _DatabricksVolumesFS(profile=self._profile)
        return self._fs_instance
@@ -0,0 +1,213 @@
1
+ Metadata-Version: 2.4
2
+ Name: dvc-databricks
3
+ Version: 1.0.3
4
+ Summary: DVC remote plugin for Databricks Unity Catalog Volumes
5
+ Project-URL: Homepage, https://github.com/ogreyesp/dvc-databricks
6
+ Project-URL: Repository, https://github.com/ogreyesp/dvc-databricks
7
+ Project-URL: Issues, https://github.com/ogreyesp/dvc-databricks/issues
8
+ Author: Óscar Reyes
9
+ License: MIT License
10
+
11
+ Copyright (c) 2026 Oscar Gabriel Reyes Pupo
12
+
13
+ Permission is hereby granted, free of charge, to any person obtaining a copy
14
+ of this software and associated documentation files (the "Software"), to deal
15
+ in the Software without restriction, including without limitation the rights
16
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
17
+ copies of the Software, and to permit persons to whom the Software is
18
+ furnished to do so, subject to the following conditions:
19
+
20
+ The above copyright notice and this permission notice shall be included in all
21
+ copies or substantial portions of the Software.
22
+
23
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
26
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
28
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
29
+ SOFTWARE.
30
+ License-File: LICENSE
31
+ Keywords: data-versioning,databricks,dvc,mlops,unity-catalog
32
+ Classifier: Development Status :: 3 - Alpha
33
+ Classifier: Intended Audience :: Developers
34
+ Classifier: Intended Audience :: Science/Research
35
+ Classifier: License :: OSI Approved :: MIT License
36
+ Classifier: Programming Language :: Python :: 3
37
+ Classifier: Programming Language :: Python :: 3.10
38
+ Classifier: Programming Language :: Python :: 3.11
39
+ Classifier: Programming Language :: Python :: 3.12
40
+ Classifier: Programming Language :: Python :: 3.13
41
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
42
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
43
+ Requires-Python: >=3.10
44
+ Requires-Dist: databricks-sdk>=0.40.0
45
+ Requires-Dist: dvc-objects>=5.0
46
+ Requires-Dist: dvc>=3.0
47
+ Requires-Dist: fsspec
48
+ Description-Content-Type: text/markdown
49
+
50
+ # dvc-databricks
51
+
52
+ A [DVC](https://dvc.org) remote storage plugin that enables data versioning on **Databricks Unity Catalog Volumes**.
53
+
54
+ Store large data files on Databricks Volumes (backed by S3 or ADLS), keep only lightweight `.dvc` pointer files in your git repository, and use standard DVC commands — no custom code required.
55
+
56
+ ```bash
57
+ dvc push # uploads data to Databricks Volume via Databricks SDK
58
+ dvc pull # downloads data from Databricks Volume
59
+ ```
60
+
61
+ ---
62
+
63
+ ## Why this plugin?
64
+
65
+ Databricks Unity Catalog Volumes cannot be accessed like a plain S3 bucket — all I/O should go through the **Databricks Files API**. This plugin bridges DVC and the Databricks SDK so you can version and share datasets stored on Volumes without ever leaving the standard DVC workflow.
66
+
67
+ ---
68
+
69
+ ## Requirements
70
+
71
+ - Python >= 3.10
72
+ - [DVC](https://dvc.org/doc/install) >= 3.0
73
+ - [Databricks CLI](https://docs.databricks.com/en/dev-tools/cli/install.html) configured with a profile in `~/.databrickscfg`
74
+ - Access to a Databricks Unity Catalog Volume
75
+
76
+ ---
77
+
78
+ ## Installation
79
+
80
+ ```bash
81
+ pip install dvc-databricks
82
+ ```
83
+
84
+ Once installed, the `dbvol://` remote protocol is automatically available to DVC in every process — no imports or additional configuration needed.
85
+
86
+ ---
87
+
88
+ ## Setup
89
+
90
+ ### 1. Initialize DVC in your repository (if not already done)
91
+
92
+ ```bash
93
+ dvc init
94
+ git add .dvc
95
+ git commit -m "initialize DVC"
96
+ ```
97
+
98
+ ### 2. Add the Databricks Volume as a DVC remote
99
+
100
+ ```bash
101
+ dvc remote add -d myremote \
102
+ dbvol:///Volumes/<catalog>/<schema>/<volume>/<path>
103
+ ```
104
+
105
+ Example:
106
+
107
+ ```bash
108
+ dvc remote add -d myremote \
109
+ dbvol:///Volumes/ml_catalog/datasets/storage/dvc_cache
110
+ ```
111
+
112
+ ### 3. Set your Databricks profile
113
+
114
+ ```bash
115
+ export DATABRICKS_CONFIG_PROFILE=<your-profile-name>
116
+ ```
117
+
118
+ > **Note:** DVC remotes do not support arbitrary config keys, so the Databricks
119
+ > profile must be provided via this environment variable — it cannot be stored
120
+ > in `.dvc/config`. Add the export to your `~/.zshrc` or `~/.bashrc` to make
121
+ > it permanent.
122
+
123
+ ---
124
+
125
+ ## Usage
126
+
127
+ ### Track a data file
128
+
129
+ ```bash
130
+ dvc add data/dataset.csv
131
+ ```
132
+
133
+ This creates `data/dataset.csv.dvc` — a small pointer file that goes into git.
134
+ The actual data file must be listed in `.gitignore`.
135
+
136
+ ### Push data to the Volume
137
+
138
+ ```bash
139
+ dvc push
140
+ ```
141
+
142
+ Uploads the file to your Databricks Volume via the Databricks SDK.
143
+
144
+ ### Commit the pointer to git
145
+
146
+ ```bash
147
+ git add data/dataset.csv.dvc .gitignore
148
+ git commit -m "track dataset v1 with DVC"
149
+ git push
150
+ ```
151
+
152
+ ### Pull data in another environment
153
+
154
+ ```bash
155
+ git clone <your-repo>
156
+ pip install dvc-databricks
157
+ export DATABRICKS_CONFIG_PROFILE=<your-profile-name>
158
+ dvc pull
159
+ ```
160
+
161
+ ---
162
+
163
+ ## How it works
164
+
165
+ ```
166
+ Your git repo Databricks Volume (S3 / ADLS)
167
+ ────────────────── ───────────────────────────────────
168
+ data/dataset.csv.dvc ──────► /Volumes/catalog/schema/vol/
169
+ .dvc/config └── files/md5/
170
+ ├── ab/cdef1234... ← actual data
171
+ └── 9f/123abc... ← actual data
172
+ ```
173
+
174
+ **`dvc add`** hashes the file and stores it in the local DVC cache (`.dvc/cache`).
175
+ A `.dvc` pointer file containing the MD5 hash is created next to your data file.
176
+
177
+ **`dvc push`** uploads from the local cache to the Volume using the Databricks
178
+ Files API (`WorkspaceClient.files.upload`). Files are stored content-addressed:
179
+ `<volume_path>/files/md5/<hash[:2]>/<hash[2:]>`.
180
+
181
+ **`dvc pull`** downloads from the Volume into the local cache, then restores
182
+ the file to its original path.
183
+
184
+ Only `.dvc` pointer files are ever committed to git — the data stays on the Volume.
185
+
186
+ ---
187
+
188
+ ## Architecture
189
+
190
+ The plugin follows the same pattern as official DVC plugins (`dvc-gdrive`, `dvc-s3`):
191
+
192
+ | Class | Base | Role |
193
+ |-------|------|------|
194
+ | `DatabricksVolumesFileSystem` | `dvc_objects.FileSystem` | DVC-facing layer: config, checksum strategy, dependency check |
195
+ | `_DatabricksVolumesFS` | `fsspec.AbstractFileSystem` | I/O layer: all Databricks SDK calls |
196
+
197
+ A `.pth` file installed into `site-packages` ensures the plugin is loaded at
198
+ Python startup in every process (including DVC CLI subprocesses), without
199
+ requiring any manual imports.
200
+
201
+ ---
202
+
203
+ ## Environment variables
204
+
205
+ | Variable | Description |
206
+ |----------|-------------|
207
+ | `DATABRICKS_CONFIG_PROFILE` | Databricks CLI profile name from `~/.databrickscfg`. Falls back to the default profile if not set. |
208
+
209
+ ---
210
+
211
+ ## License
212
+
213
+ [MIT](LICENSE) © Óscar Reyes
@@ -0,0 +1,7 @@
1
+ dvc_databricks/__init__.py,sha256=-qPDUkQNhaT5XsgnDsxz0HR3h4BhI-7oXrxksqPvDqQ,647
2
+ dvc_databricks/filesystem.py,sha256=D2SWtGMs2QknTwn0PHTYilsEYN_80U8YDfLotaVVny0,18668
3
+ dvc_databricks_startup.pth,sha256=eXTxlJfR7nQVmrHlwVnuDGqfT4_e1KxSE_8vGOHn2BY,22
4
+ dvc_databricks-1.0.3.dist-info/METADATA,sha256=Mo0Q1pnqJ44skaA54ox29Ew5An_4U8D35hKxxofHTe8,7024
5
+ dvc_databricks-1.0.3.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
6
+ dvc_databricks-1.0.3.dist-info/licenses/LICENSE,sha256=Vul_QQsauZi10NIVpyaobqFqBg_t9Hv5FwjfsNe1wHc,1081
7
+ dvc_databricks-1.0.3.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.29.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Oscar Gabriel Reyes Pupo
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ import dvc_databricks