dvc-databricks 1.0.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,35 @@
+ name: CI
+
+ on:
+   pull_request:
+     branches:
+       - main
+
+ jobs:
+   lint-and-build:
+     name: Lint & Build
+     runs-on: ubuntu-latest
+
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v4
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: "3.12"
+
+       - name: Install build tools
+         run: pip install hatchling
+
+       - name: Check package builds cleanly
+         run: python -m hatchling build
+
+       - name: Verify package installs and registers dbvol://
+         run: |
+           pip install dist/*.whl
+           python -c "
+           from dvc_objects.fs import known_implementations
+           assert 'dbvol' in known_implementations, 'dbvol not registered!'
+           print('dbvol registered successfully:', known_implementations['dbvol'])
+           "
@@ -0,0 +1,47 @@
+ name: Release
+
+ on:
+   push:
+     branches:
+       - main
+
+ permissions:
+   contents: write
+
+ jobs:
+   release:
+     name: Semantic Release & PyPI Publish
+     runs-on: ubuntu-latest
+     concurrency: release
+
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v4
+         with:
+           fetch-depth: 0
+           token: ${{ secrets.GITHUB_TOKEN }}
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: "3.12"
+
+       - name: Install tools
+         run: pip install python-semantic-release hatchling twine
+
+       - name: Run semantic release (version bump + GitHub release)
+         env:
+           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+         run: |
+           git config user.name "github-actions[bot]"
+           git config user.email "github-actions[bot]@users.noreply.github.com"
+           semantic-release version
+
+       - name: Build package
+         run: python -m hatchling build
+
+       - name: Publish to PyPI
+         env:
+           TWINE_USERNAME: __token__
+           TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
+         run: twine upload dist/*
+++ .gitignore
@@ -0,0 +1,211 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[codz]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py.cover
+ .hypothesis/
+ .pytest_cache/
+ cover/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ .pybuilder/
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ # For a library or package, you might want to ignore these files since the code is
+ # intended to run in multiple environments; otherwise, check them in:
+ # .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # UV
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ #uv.lock
+
+ # poetry
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+ #poetry.lock
+ #poetry.toml
+
+ # pdm
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+ # pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+ # https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+ #pdm.lock
+ #pdm.toml
+ .pdm-python
+ .pdm-build/
+
+ # pixi
+ # Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+ #pixi.lock
+ # Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+ # in the .venv directory. It is recommended not to include this directory in version control.
+ .pixi
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .envrc
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # Cython debug symbols
+ cython_debug/
+
+ # PyCharm
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
+ #.idea/
+
+ # Abstra
+ # Abstra is an AI-powered process automation framework.
+ # Ignore directories containing user credentials, local state, and settings.
+ # Learn more at https://abstra.io/docs
+ .abstra/
+
+ # Visual Studio Code
+ # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+ # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+ # and can be added to the global gitignore or merged into this file. However, if you prefer,
+ # you could uncomment the following to ignore the entire vscode folder
+ # .vscode/
+
+ # Ruff stuff:
+ .ruff_cache/
+
+ # PyPI configuration file
+ .pypirc
+
+ # Cursor
+ # Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
+ # exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
+ # refer to https://docs.cursor.com/context/ignore-files
+ .cursorignore
+ .cursorindexingignore
+
+ # Marimo
+ marimo/_static/
+ marimo/_lsp/
+ __marimo__/
+
+ # DVC local cache and temp files (never committed)
+ .dvc/cache
+ .dvc/tmp
+++ CHANGELOG.md
@@ -0,0 +1,31 @@
+ # CHANGELOG
+
+ <!-- version list -->
+
+ ## v1.0.3 (2026-04-03)
+
+ ### Bug Fixes
+
+ - Remove broken output command from release workflow
+   ([`d91f6a1`](https://github.com/ogreyesp/dvc-databricks/commit/d91f6a1b07747430091c69689104cc27c9ddea56))
+
+
+ ## v1.0.2 (2026-04-03)
+
+ ### Bug Fixes
+
+ - Separate CI from release workflow and use twine for PyPI upload
+   ([`3f8783d`](https://github.com/ogreyesp/dvc-databricks/commit/3f8783dc260a6f5d965bdde48d8ede4bd83718d3))
+
+
+ ## v1.0.1 (2026-04-03)
+
+ ### Bug Fixes
+
+ - Include .pth file in wheel root via force-include
+   ([`43637ea`](https://github.com/ogreyesp/dvc-databricks/commit/43637ea1da0903c797e18a2e7f3ebd18b7fdb66d))
+
+
+ ## v1.0.0 (2026-04-03)
+
+ - Initial Release
+++ LICENSE
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2026 Oscar Gabriel Reyes Pupo
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+++ PKG-INFO
@@ -0,0 +1,213 @@
+ Metadata-Version: 2.4
+ Name: dvc-databricks
+ Version: 1.0.3
+ Summary: DVC remote plugin for Databricks Unity Catalog Volumes
+ Project-URL: Homepage, https://github.com/ogreyesp/dvc-databricks
+ Project-URL: Repository, https://github.com/ogreyesp/dvc-databricks
+ Project-URL: Issues, https://github.com/ogreyesp/dvc-databricks/issues
+ Author: Óscar Reyes
+ License: MIT License
+
+         Copyright (c) 2026 Oscar Gabriel Reyes Pupo
+
+         Permission is hereby granted, free of charge, to any person obtaining a copy
+         of this software and associated documentation files (the "Software"), to deal
+         in the Software without restriction, including without limitation the rights
+         to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+         copies of the Software, and to permit persons to whom the Software is
+         furnished to do so, subject to the following conditions:
+
+         The above copyright notice and this permission notice shall be included in all
+         copies or substantial portions of the Software.
+
+         THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+         IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+         FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+         AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+         LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+         OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+         SOFTWARE.
+ License-File: LICENSE
+ Keywords: data-versioning,databricks,dvc,mlops,unity-catalog
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: Science/Research
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Requires-Python: >=3.10
+ Requires-Dist: databricks-sdk>=0.40.0
+ Requires-Dist: dvc-objects>=5.0
+ Requires-Dist: dvc>=3.0
+ Requires-Dist: fsspec
+ Description-Content-Type: text/markdown
+
+ # dvc-databricks
+
+ A [DVC](https://dvc.org) remote storage plugin that enables data versioning on **Databricks Unity Catalog Volumes**.
+
+ Store large data files on Databricks Volumes (backed by S3 or ADLS), keep only lightweight `.dvc` pointer files in your git repository, and use standard DVC commands — no custom code required.
+
+ ```bash
+ dvc push   # uploads data to Databricks Volume via Databricks SDK
+ dvc pull   # downloads data from Databricks Volume
+ ```
+
+ ---
+
+ ## Why this plugin?
+
+ Databricks Unity Catalog Volumes cannot be accessed like a plain S3 bucket — all I/O must go through the **Databricks Files API**. This plugin bridges DVC and the Databricks SDK so you can version and share datasets stored on Volumes without ever leaving the standard DVC workflow.
+
+ ---
+
+ ## Requirements
+
+ - Python >= 3.10
+ - [DVC](https://dvc.org/doc/install) >= 3.0
+ - [Databricks CLI](https://docs.databricks.com/en/dev-tools/cli/install.html) configured with a profile in `~/.databrickscfg`
+ - Access to a Databricks Unity Catalog Volume
+
+ ---
+
+ ## Installation
+
+ ```bash
+ pip install dvc-databricks
+ ```
+
+ Once installed, the `dbvol://` remote protocol is automatically available to DVC in every process — no imports or additional configuration needed.
+
+ ---
+
+ ## Setup
+
+ ### 1. Initialize DVC in your repository (if not already done)
+
+ ```bash
+ dvc init
+ git add .dvc
+ git commit -m "initialize DVC"
+ ```
+
+ ### 2. Add the Databricks Volume as a DVC remote
+
+ ```bash
+ dvc remote add -d myremote \
+     dbvol:///Volumes/<catalog>/<schema>/<volume>/<path>
+ ```
+
+ Example:
+
+ ```bash
+ dvc remote add -d myremote \
+     dbvol:///Volumes/ml_catalog/datasets/storage/dvc_cache
+ ```
+
+ ### 3. Set your Databricks profile
+
+ ```bash
+ export DATABRICKS_CONFIG_PROFILE=<your-profile-name>
+ ```
+
+ > **Note:** DVC remotes do not support arbitrary config keys, so the Databricks
+ > profile must be provided via this environment variable — it cannot be stored
+ > in `.dvc/config`. Add the export to your `~/.zshrc` or `~/.bashrc` to make
+ > it permanent.
+
+ ---
+
+ ## Usage
+
+ ### Track a data file
+
+ ```bash
+ dvc add data/dataset.csv
+ ```
+
+ This creates `data/dataset.csv.dvc` — a small pointer file that goes into git.
+ The actual data file must be listed in `.gitignore`; `dvc add` adds that entry for you.
+
+ ### Push data to the Volume
+
+ ```bash
+ dvc push
+ ```
+
+ Uploads the file to your Databricks Volume via the Databricks SDK.
+
+ ### Commit the pointer to git
+
+ ```bash
+ git add data/dataset.csv.dvc .gitignore
+ git commit -m "track dataset v1 with DVC"
+ git push
+ ```
+
+ ### Pull data in another environment
+
+ ```bash
+ git clone <your-repo>
+ pip install dvc-databricks
+ export DATABRICKS_CONFIG_PROFILE=<your-profile-name>
+ dvc pull
+ ```
+
+ ---
+
+ ## How it works
+
+ ```
+ Your git repo                        Databricks Volume (S3 / ADLS)
+ ──────────────────                   ───────────────────────────────────
+ data/dataset.csv.dvc   ──────►       /Volumes/catalog/schema/vol/
+ .dvc/config                          └── files/md5/
+                                          ├── ab/cdef1234...   ← actual data
+                                          └── 9f/123abc...     ← actual data
+ ```
+
+ **`dvc add`** hashes the file and stores it in the local DVC cache (`.dvc/cache`).
+ A `.dvc` pointer file containing the MD5 hash is created next to your data file.
+
+ **`dvc push`** uploads from the local cache to the Volume using the Databricks
+ Files API (`WorkspaceClient.files.upload`). Files are stored content-addressed:
+ `<volume_path>/files/md5/<hash[:2]>/<hash[2:]>`.
+
+ **`dvc pull`** downloads from the Volume into the local cache, then restores
+ the file to its original path.
+
+ Only `.dvc` pointer files are ever committed to git — the data stays on the Volume.
+
+ ---
+
+ ## Architecture
+
+ The plugin follows the same pattern as official DVC plugins (`dvc-gdrive`, `dvc-s3`):
+
+ | Class | Base | Role |
+ |-------|------|------|
+ | `DatabricksVolumesFileSystem` | `dvc_objects.FileSystem` | DVC-facing layer: config, checksum strategy, dependency check |
+ | `_DatabricksVolumesFS` | `fsspec.AbstractFileSystem` | I/O layer: all Databricks SDK calls |
+
+ A `.pth` file installed into `site-packages` ensures the plugin is loaded at
+ Python startup in every process (including DVC CLI subprocesses), without
+ requiring any manual imports.
+
+ ---
+
+ ## Environment variables
+
+ | Variable | Description |
+ |----------|-------------|
+ | `DATABRICKS_CONFIG_PROFILE` | Databricks CLI profile name from `~/.databrickscfg`. Falls back to the default profile if not set. |
+
+ ---
+
+ ## License
+
+ [MIT](LICENSE) © Óscar Reyes
+++ README.md
@@ -0,0 +1,164 @@
+ # dvc-databricks
+
+ A [DVC](https://dvc.org) remote storage plugin that enables data versioning on **Databricks Unity Catalog Volumes**.
+
+ Store large data files on Databricks Volumes (backed by S3 or ADLS), keep only lightweight `.dvc` pointer files in your git repository, and use standard DVC commands — no custom code required.
+
+ ```bash
+ dvc push   # uploads data to Databricks Volume via Databricks SDK
+ dvc pull   # downloads data from Databricks Volume
+ ```
+
+ ---
+
+ ## Why this plugin?
+
+ Databricks Unity Catalog Volumes cannot be accessed like a plain S3 bucket — all I/O must go through the **Databricks Files API**. This plugin bridges DVC and the Databricks SDK so you can version and share datasets stored on Volumes without ever leaving the standard DVC workflow.
+
+ ---
+
+ ## Requirements
+
+ - Python >= 3.10
+ - [DVC](https://dvc.org/doc/install) >= 3.0
+ - [Databricks CLI](https://docs.databricks.com/en/dev-tools/cli/install.html) configured with a profile in `~/.databrickscfg`
+ - Access to a Databricks Unity Catalog Volume
+
+ ---
+
+ ## Installation
+
+ ```bash
+ pip install dvc-databricks
+ ```
+
+ Once installed, the `dbvol://` remote protocol is automatically available to DVC in every process — no imports or additional configuration needed.
+
+ ---
+
+ ## Setup
+
+ ### 1. Initialize DVC in your repository (if not already done)
+
+ ```bash
+ dvc init
+ git add .dvc
+ git commit -m "initialize DVC"
+ ```
+
+ ### 2. Add the Databricks Volume as a DVC remote
+
+ ```bash
+ dvc remote add -d myremote \
+     dbvol:///Volumes/<catalog>/<schema>/<volume>/<path>
+ ```
+
+ Example:
+
+ ```bash
+ dvc remote add -d myremote \
+     dbvol:///Volumes/ml_catalog/datasets/storage/dvc_cache
+ ```
+
+ ### 3. Set your Databricks profile
+
+ ```bash
+ export DATABRICKS_CONFIG_PROFILE=<your-profile-name>
+ ```
+
+ > **Note:** DVC remotes do not support arbitrary config keys, so the Databricks
+ > profile must be provided via this environment variable — it cannot be stored
+ > in `.dvc/config`. Add the export to your `~/.zshrc` or `~/.bashrc` to make
+ > it permanent.
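+
+ To sanity-check that the profile authenticates before wiring it into DVC,
+ you can hit the workspace directly with the SDK (a minimal check, not part
+ of this plugin; `my-profile` is a placeholder name):
+
+ ```python
+ from databricks.sdk import WorkspaceClient
+
+ # Uses DATABRICKS_CONFIG_PROFILE when set; profile= overrides it.
+ w = WorkspaceClient(profile="my-profile")
+ print(w.current_user.me().user_name)  # an error here means auth is misconfigured
+ ```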
+
+ ---
+
+ ## Usage
+
+ ### Track a data file
+
+ ```bash
+ dvc add data/dataset.csv
+ ```
+
+ This creates `data/dataset.csv.dvc` — a small pointer file that goes into git.
+ The actual data file must be listed in `.gitignore`; `dvc add` adds that entry for you.
+
+ ### Push data to the Volume
+
+ ```bash
+ dvc push
+ ```
+
+ Uploads the file to your Databricks Volume via the Databricks SDK.
+
+ ### Commit the pointer to git
+
+ ```bash
+ git add data/dataset.csv.dvc .gitignore
+ git commit -m "track dataset v1 with DVC"
+ git push
+ ```
+
+ ### Pull data in another environment
+
+ ```bash
+ git clone <your-repo>
+ pip install dvc-databricks
+ export DATABRICKS_CONFIG_PROFILE=<your-profile-name>
+ dvc pull
+ ```
+
+ ---
+
+ ## How it works
+
+ ```
+ Your git repo                        Databricks Volume (S3 / ADLS)
+ ──────────────────                   ───────────────────────────────────
+ data/dataset.csv.dvc   ──────►       /Volumes/catalog/schema/vol/
+ .dvc/config                          └── files/md5/
+                                          ├── ab/cdef1234...   ← actual data
+                                          └── 9f/123abc...     ← actual data
+ ```
+
+ **`dvc add`** hashes the file and stores it in the local DVC cache (`.dvc/cache`).
+ A `.dvc` pointer file containing the MD5 hash is created next to your data file.
+
+ **`dvc push`** uploads from the local cache to the Volume using the Databricks
+ Files API (`WorkspaceClient.files.upload`). Files are stored content-addressed:
+ `<volume_path>/files/md5/<hash[:2]>/<hash[2:]>`.
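+
+ A minimal sketch of that layout, using only the public `databricks-sdk`
+ Files API (the volume root is the example remote from the Setup section;
+ in practice DVC performs all of this bookkeeping itself):
+
+ ```python
+ import hashlib
+ from databricks.sdk import WorkspaceClient
+
+ VOLUME = "/Volumes/ml_catalog/datasets/storage/dvc_cache"  # example remote root
+
+ def remote_path(local_file: str) -> str:
+     """Where DVC would store this file on the Volume (content-addressed)."""
+     with open(local_file, "rb") as fh:
+         md5 = hashlib.md5(fh.read()).hexdigest()
+     return f"{VOLUME}/files/md5/{md5[:2]}/{md5[2:]}"
+
+ w = WorkspaceClient()  # honors DATABRICKS_CONFIG_PROFILE
+ dest = remote_path("data/dataset.csv")
+ with open("data/dataset.csv", "rb") as fh:
+     w.files.upload(dest, fh, overwrite=True)     # roughly what dvc push does
+ data = w.files.download(dest).contents.read()    # roughly what dvc pull does
+ ```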
+
+ **`dvc pull`** downloads from the Volume into the local cache, then restores
+ the file to its original path.
+
+ Only `.dvc` pointer files are ever committed to git — the data stays on the Volume.
+
+ ---
+
+ ## Architecture
+
+ The plugin follows the same pattern as official DVC plugins (`dvc-gdrive`, `dvc-s3`):
+
+ | Class | Base | Role |
+ |-------|------|------|
+ | `DatabricksVolumesFileSystem` | `dvc_objects.FileSystem` | DVC-facing layer: config, checksum strategy, dependency check |
+ | `_DatabricksVolumesFS` | `fsspec.AbstractFileSystem` | I/O layer: all Databricks SDK calls |
+
+ A `.pth` file installed into `site-packages` ensures the plugin is loaded at
+ Python startup in every process (including DVC CLI subprocesses), without
+ requiring any manual imports.
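+
+ You can verify the registration from any interpreter (this mirrors the
+ assertion in the CI workflow above):
+
+ ```python
+ from dvc_objects.fs import known_implementations
+
+ # The .pth hook has already run `import dvc_databricks` at startup.
+ assert "dbvol" in known_implementations, "dbvol not registered!"
+ print(known_implementations["dbvol"])
+ ```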
+
+ ---
+
+ ## Environment variables
+
+ | Variable | Description |
+ |----------|-------------|
+ | `DATABRICKS_CONFIG_PROFILE` | Databricks CLI profile name from `~/.databrickscfg`. Falls back to the default profile if not set. |
+
+ ---
+
+ ## License
+
+ [MIT](LICENSE) © Óscar Reyes
+++ pyproject.toml
@@ -0,0 +1,80 @@
+ [project]
+ name = "dvc-databricks"
+ version = "1.0.3"
+ description = "DVC remote plugin for Databricks Unity Catalog Volumes"
+ readme = "README.md"
+ license = { file = "LICENSE" }
+ authors = [{ name = "Óscar Reyes" }]
+ keywords = ["dvc", "databricks", "data-versioning", "mlops", "unity-catalog"]
+ classifiers = [
+     "Development Status :: 3 - Alpha",
+     "Intended Audience :: Developers",
+     "Intended Audience :: Science/Research",
+     "License :: OSI Approved :: MIT License",
+     "Programming Language :: Python :: 3",
+     "Programming Language :: Python :: 3.10",
+     "Programming Language :: Python :: 3.11",
+     "Programming Language :: Python :: 3.12",
+     "Programming Language :: Python :: 3.13",
+     "Topic :: Scientific/Engineering :: Artificial Intelligence",
+     "Topic :: Software Development :: Libraries :: Python Modules",
+ ]
+ requires-python = ">=3.10"
+ dependencies = [
+     "databricks-sdk>=0.40.0",
+     "dvc>=3.0",
+     "dvc-objects>=5.0",
+     "fsspec",
+ ]
+
+ [project.urls]
+ Homepage = "https://github.com/ogreyesp/dvc-databricks"
+ Repository = "https://github.com/ogreyesp/dvc-databricks"
+ Issues = "https://github.com/ogreyesp/dvc-databricks/issues"
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [tool.hatch.build.targets.wheel]
+ packages = ["src/dvc_databricks"]
+
+ # force-include places dvc_databricks_startup.pth at the root of the wheel.
+ # pip installs wheel-root files directly into site-packages, so Python will
+ # execute "import dvc_databricks" at interpreter startup in every process —
+ # including DVC CLI subprocesses — registering the dbvol:// protocol.
+ [tool.hatch.build.targets.wheel.force-include]
+ "src/dvc_databricks_startup.pth" = "dvc_databricks_startup.pth"
+
+ [tool.semantic_release]
+ version_toml = ["pyproject.toml:project.version"]
+ branch = "main"
+ changelog_file = "CHANGELOG.md"
+ build_command = "pip install hatchling && python -m hatchling build"
+ dist_path = "dist/"
+ upload_to_pypi = false
+ upload_to_release = false
+ commit_message = "chore(release): {version} [skip ci]"
+
+ [tool.semantic_release.commit_parser_options]
+ allowed_tags = [
+     "feat",
+     "fix",
+     "perf",
+     "refactor",
+     "docs",
+     "chore",
+     "style",
+     "test",
+     "build",
+     "ci",
+ ]
+ minor_tags = ["feat"]
+ patch_tags = ["fix", "perf", "refactor"]
+
+ [tool.semantic_release.remote]
+ type = "github"
+
+ [tool.semantic_release.publish]
+ dist_glob_patterns = ["dist/*"]
+ upload_to_vcs_release = true
+++ src/dvc_databricks/__init__.py
@@ -0,0 +1,17 @@
+ """
+ dvc-databricks — DVC remote plugin for Databricks Unity Catalog Volumes.
+
+ Registers the ``dbvol`` protocol into ``dvc_objects.fs.known_implementations``
+ so that DVC can resolve ``dbvol://`` remotes in any process where this
+ package is installed.
+
+ This registration runs on import. The package uses a .pth file (installed
+ into site-packages) to ensure this module is imported at Python startup,
+ which makes ``dvc push`` / ``dvc pull`` work from the CLI without any
+ manual imports.
+ """
+ from dvc_objects.fs import known_implementations
+
+ known_implementations["dbvol"] = {
+     "class": "dvc_databricks.filesystem.DatabricksVolumesFileSystem",
+ }
+++ src/dvc_databricks/filesystem.py
@@ -0,0 +1,546 @@
+ """
+ DVC filesystem plugin for Databricks Unity Catalog Volumes.
+
+ Architecture:
+
+     DatabricksVolumesFileSystem          ← dvc_objects.FileSystem subclass
+     │   DVC-facing layer: config parsing,
+     │   checksum strategy, plugin registration
+
+     └── self.fs                          ← _DatabricksVolumesFS (fsspec.AbstractFileSystem)
+             I/O layer: upload, download, list, delete
+             via Databricks SDK Files API
+
+ When this package is installed, a ``.pth`` startup hook imports
+ ``dvc_databricks``, which registers ``DatabricksVolumesFileSystem`` under
+ the ``dbvol`` protocol. DVC discovers it automatically — no imports or
+ manual configuration required.
+
+ Users configure the remote once:
+
+     dvc remote add -d myremote dbvol:///Volumes/catalog/schema/volume/path
+     export DATABRICKS_CONFIG_PROFILE=<profile>
+
+ Then use standard DVC commands as usual:
+
+     dvc push / dvc pull / dvc status ...
+ """
+
+ from __future__ import annotations
+
+ import io
+ import logging
+ import os
+ import threading
+ from typing import ClassVar
+
+ from databricks.sdk import WorkspaceClient
+ from databricks.sdk.config import Config
+ from dvc_objects.fs.base import FileSystem
+ from fsspec import AbstractFileSystem
+
+ logger = logging.getLogger(__name__)
+
+
+ # ---------------------------------------------------------------------------
+ # Inner fsspec filesystem — handles raw I/O via Databricks SDK
+ # ---------------------------------------------------------------------------
+
+
+ class _DatabricksVolumesFS(AbstractFileSystem):
+     """fsspec filesystem that routes all I/O through the Databricks SDK Files API.
+
+     This is the I/O layer used internally by ``DatabricksVolumesFileSystem``.
+     It is not intended to be used directly by end users.
+
+     Args:
+         profile: Databricks CLI profile name from ``~/.databrickscfg``.
+             When ``None``, the SDK reads ``DATABRICKS_CONFIG_PROFILE`` from
+             the environment, then falls back to the default profile.
+         **storage_options: Additional options forwarded to
+             ``AbstractFileSystem.__init__``.
+     """
+
+     protocol = "dbvol"
+
+     def __init__(self, profile: str | None = None, **storage_options):
+         super().__init__(**storage_options)
+
+         resolved = profile or os.environ.get("DATABRICKS_CONFIG_PROFILE")
+         cfg = Config(profile=resolved) if resolved else Config()
+         self._client = WorkspaceClient(config=cfg)
+
+     # ------------------------------------------------------------------
+     # Path helpers
+     # ------------------------------------------------------------------
+
+     @classmethod
+     def _strip_protocol(cls, path: str | list[str]) -> str | list[str]:
+         """Remove the ``dbvol://`` prefix and ensure a leading slash.
+
+         Args:
+             path: Raw path, possibly prefixed with ``dbvol://``. Also accepts
+                 a list of paths, in which case each element is processed.
+
+         Returns:
+             Absolute path string starting with ``/``, or a list of such
+             strings when the input is a list.
+         """
+         if isinstance(path, list):
+             return [cls._strip_protocol(p) for p in path]
+
+         path = super()._strip_protocol(path)
+
+         if not path.startswith("/"):
+             path = "/" + path
+
+         return path
+
+     # ------------------------------------------------------------------
+     # Metadata operations
+     # ------------------------------------------------------------------
+
+     def ls(self, path: str, detail: bool = True, **kwargs):
+         """List a directory or return a single-item list for a file.
+
+         Tries the path as a directory first; falls back to ``info()`` if the
+         directory listing raises an exception (i.e. path is a file).
+
+         Args:
+             path: Absolute Volume path to list.
+             detail: If ``True``, return a list of dicts with keys
+                 ``name``, ``type``, ``size``, and ``last_modified``.
+                 If ``False``, return a list of path strings.
+             **kwargs: Ignored; present for fsspec compatibility.
+
+         Returns:
+             List of dicts (when ``detail=True``) or list of path strings
+             (when ``detail=False``).
+
+         Raises:
+             FileNotFoundError: If the path does not exist.
+         """
+         path = self._strip_protocol(path)
+
+         try:
+             entries = list(self._client.files.list_directory_contents(path))
+             result = []
+
+             for entry in entries:
+                 info = {
+                     "name": entry.path,
+                     "type": "directory" if entry.is_directory else "file",
+                     "size": entry.file_size or 0,
+                     "last_modified": entry.last_modified,
+                 }
+                 result.append(info if detail else entry.path)
+
+             return result
+
+         except Exception:
+             pass
+
+         # Fall back to a single-file lookup via info() to avoid duplicating
+         # the metadata retrieval logic.
+         info = self.info(path)  # raises FileNotFoundError if path does not exist
+         return [info] if detail else [info["name"]]
+
+     def info(self, path: str, **kwargs) -> dict:
+         """Return metadata for a single file or directory.
+
+         Tries a file metadata lookup first (cheaper), then falls back to a
+         directory metadata lookup.
+
+         Args:
+             path: Absolute Volume path.
+             **kwargs: Ignored; present for fsspec compatibility.
+
+         Returns:
+             Dict with keys ``name`` (str), ``type`` (``"file"`` or
+             ``"directory"``), and ``size`` (int, bytes).
+
+         Raises:
+             FileNotFoundError: If the path does not exist.
+         """
+         path = self._strip_protocol(path)
+
+         try:
+             meta = self._client.files.get_metadata(path)
+             return {"name": path, "type": "file", "size": meta.content_length or 0}
+         except Exception:
+             pass
+
+         try:
+             self._client.files.get_directory_metadata(path)
+             return {"name": path, "type": "directory", "size": 0}
+         except Exception:
+             raise FileNotFoundError(f"No such file or directory: {path!r}")
+
+     def exists(self, path: str, **kwargs) -> bool:
+         """Return ``True`` if *path* exists on the Volume, ``False`` otherwise.
+
+         Args:
+             path: Absolute Volume path to check.
+             **kwargs: Ignored; present for fsspec compatibility.
+
+         Returns:
+             ``True`` if the path exists, ``False`` if not.
+         """
+         try:
+             self.info(path)
+             return True
+         except FileNotFoundError:
+             return False
+
+     # ------------------------------------------------------------------
+     # Directory operations
+     # ------------------------------------------------------------------
+
+     def mkdir(self, path: str, create_parents: bool = True, **kwargs):
+         """Create a directory on the Volume.
+
+         Args:
+             path: Absolute Volume path for the new directory.
+             create_parents: Ignored — the Databricks Files API always
+                 creates intermediate directories automatically.
+             **kwargs: Ignored; present for fsspec compatibility.
+         """
+         path = self._strip_protocol(path)
+         self._client.files.create_directory(path)
+
+     def makedirs(self, path: str, exist_ok: bool = False):
+         """Create a directory and all intermediate parents.
+
+         Args:
+             path: Absolute Volume path for the new directory.
+             exist_ok: If ``False``, re-raises any exception thrown by the
+                 API. If ``True``, suppresses those exceptions.
+         """
+         path = self._strip_protocol(path)
+         try:
+             self._client.files.create_directory(path)
+         except Exception:
+             if not exist_ok:
+                 raise
+
+     def rm_file(self, path: str):
+         """Delete a single file from the Volume.
+
+         Args:
+             path: Absolute Volume path of the file to delete.
+         """
+         path = self._strip_protocol(path)
+         self._client.files.delete(path)
+
+     def rm(self, path, recursive: bool = False, **kwargs):
+         """Delete one or more files or directories from the Volume.
+
+         Args:
+             path: Absolute Volume path (str) or list of paths to delete.
+             recursive: If ``True``, recursively delete directory contents
+                 before deleting the directory itself.
+             **kwargs: Ignored; present for fsspec compatibility.
+         """
+         paths = path if isinstance(path, list) else [path]
+
+         for p in paths:
+             p = self._strip_protocol(p)
+
+             if recursive and self.isdir(p):
+                 for entry in self.ls(p, detail=True):
+                     self.rm(entry["name"], recursive=True)
+                 # files.delete() only removes files; remove the now-empty
+                 # directory itself, as promised above.
+                 self._client.files.delete_directory(p)
+             else:
+                 self._client.files.delete(p)
+
+     # ------------------------------------------------------------------
+     # File I/O
+     # ------------------------------------------------------------------
+
+     def _open(self, path: str, mode: str = "rb", **kwargs):
+         """Open a file on the Volume for reading or writing.
+
+         For reads, the file is downloaded eagerly into a ``BytesIO`` buffer.
+         For writes, a ``_WriteBuffer`` is returned which uploads on ``close()``.
+
+         Args:
+             path: Absolute Volume path.
+             mode: ``"rb"`` for reading or ``"wb"`` for writing.
+             **kwargs: Ignored; present for fsspec compatibility.
+
+         Returns:
+             A ``BytesIO`` instance (read mode) or a ``_WriteBuffer``
+             instance (write mode).
+
+         Raises:
+             ValueError: If *mode* is neither ``"rb"`` nor ``"wb"``.
+         """
+         path = self._strip_protocol(path)
+
+         if "r" in mode:
+             response = self._client.files.download(path)
+             return io.BytesIO(response.contents.read())
+
+         if "w" in mode:
+             return _WriteBuffer(self._client, path)
+
+         raise ValueError(f"Unsupported mode: {mode!r}")
+
+     def put_file(self, lpath: str, rpath: str, **kwargs):
+         """Upload a single local file to the Volume.
+
+         Args:
+             lpath: Absolute local filesystem path of the source file.
+             rpath: Absolute Volume path of the destination.
+             **kwargs: Ignored; present for fsspec compatibility.
+         """
+         rpath = self._strip_protocol(rpath)
+
+         with open(lpath, "rb") as fh:
+             self._client.files.upload(rpath, fh, overwrite=True)
+
+     def get_file(self, rpath: str, lpath: str, outfile=None, **kwargs):
+         """Download a single file from the Volume to a local path.
+
+         Args:
+             rpath: Absolute Volume path of the source file.
+             lpath: Absolute local filesystem path of the destination.
+                 Intermediate directories are created automatically.
+             outfile: If provided, write the downloaded bytes into this
+                 file-like object instead of saving to *lpath*.
+             **kwargs: Ignored; present for fsspec compatibility.
+         """
+         rpath = self._strip_protocol(rpath)
+         response = self._client.files.download(rpath)
+
+         if outfile is not None:
+             outfile.write(response.contents.read())
+         else:
+             os.makedirs(os.path.dirname(os.path.abspath(lpath)), exist_ok=True)
+
+             with open(lpath, "wb") as fh:
+                 fh.write(response.contents.read())
+
+
+ class _WriteBuffer(io.RawIOBase):
+     """Write-only in-memory buffer that uploads to Databricks on ``close()``.
+
+     fsspec's ``_open(mode="wb")`` contract expects a file-like object.
+     Because the Databricks Files API requires the full content to be
+     available at upload time (it is not a streaming multipart API), we
+     accumulate all ``write()`` calls in a ``BytesIO`` buffer and perform
+     a single ``files.upload()`` call when the buffer is closed.
+
+     The upload is triggered exactly once, either by an explicit ``close()``
+     call or when used as a context manager (``with fs.open(path, "wb") as f``).
+
+     Example:
+         >>> with fs.open("/Volumes/catalog/schema/vol/file.csv", "wb") as f:
+         ...     f.write(b"col1,col2\\n1,2\\n")
+     """
+
+     def __init__(self, client, path: str):
+         """Initialize the buffer.
+
+         Args:
+             client: An authenticated ``WorkspaceClient`` instance.
+             path: Absolute Volume path where the file will be written,
+                 e.g. ``/Volumes/catalog/schema/volume/subdir/file.csv``.
+         """
+         self._client = client
+         self._path = path
+         self._buf = io.BytesIO()
+
+     def write(self, data: bytes) -> int:
+         """Append *data* to the in-memory buffer.
+
+         No network call is made at this point.
+
+         Args:
+             data: Bytes to buffer.
+
+         Returns:
+             Number of bytes written.
+         """
+         return self._buf.write(data)
+
+     def close(self):
+         """Flush the buffer to the Databricks Volume and close the stream.
+
+         Performs a single ``files.upload()`` call with the accumulated
+         buffer contents. Subsequent calls are no-ops (guarded by
+         ``self.closed``).
+         """
+         if not self.closed:
+             self._buf.seek(0)
+             self._client.files.upload(self._path, self._buf, overwrite=True)
+
+         super().close()
+
+     def readable(self) -> bool:
+         """Return ``False`` — this stream is write-only.
+
+         Returns:
+             Always ``False``.
+         """
+         return False
+
+     def writable(self) -> bool:
+         """Return ``True`` — this stream accepts ``write()`` calls.
+
+         Returns:
+             Always ``True``.
+         """
+         return True
+
+     def seekable(self) -> bool:
+         """Return ``False`` — seeking is not supported on the public interface.
+
+         The internal ``BytesIO`` buffer is seeked internally by ``close()``
+         before uploading, but callers must not rely on seek support.
+
+         Returns:
+             Always ``False``.
+         """
+         return False
+
+     def __enter__(self):
+         """Return *self* to support usage as a context manager.
+
+         Returns:
+             This ``_WriteBuffer`` instance.
+         """
+         return self
+
+     def __exit__(self, *args):
+         """Close the buffer and trigger the upload on context manager exit.
+
+         Args:
+             *args: Exception info (type, value, traceback) — ignored.
+         """
+         self.close()
+
+
+ # ---------------------------------------------------------------------------
+ # DVC plugin — dvc_objects.FileSystem wrapper
+ # ---------------------------------------------------------------------------
+
+
+ class DatabricksVolumesFileSystem(FileSystem):
+     """DVC remote filesystem backed by Databricks Unity Catalog Volumes.
+
+     Extends ``dvc_objects.fs.base.FileSystem``.
+
+     DVC delegates all storage operations to ``self.fs`` (a
+     ``_DatabricksVolumesFS`` instance), which communicates with the
+     Databricks Volume via the SDK Files API — no direct S3 access.
+
+     Configuration (one-time setup per repo):
+
+         dvc remote add -d myremote \\
+             dbvol:///Volumes/catalog/schema/volume/dvc_cache
+         export DATABRICKS_CONFIG_PROFILE=<profile>
+
+     After that, standard DVC commands work without any code changes:
+
+         dvc push / dvc pull / dvc status
+
+     Note:
+         ``DATABRICKS_CONFIG_PROFILE`` must be set in the environment because
+         DVC remotes do not support arbitrary config keys. The profile cannot
+         be stored in ``.dvc/config``.
+     """
+
+     protocol = "dbvol"
+     PARAM_CHECKSUM = "md5"
+     # Format: {"pip_package_name": "importable.module.name"}
+     # dvc_objects calls find_spec() on the value to check the dep is installed.
+     REQUIRES: ClassVar[dict[str, str]] = {"databricks-sdk": "databricks.sdk"}
+
+     def __init__(self, **config):
+         """Parse DVC remote config and prepare the filesystem.
+
+         Args:
+             **config: DVC remote configuration dict. Expected keys:
+
+                 - ``url`` (str): Full remote URL, e.g.
+                   ``dbvol:///Volumes/catalog/schema/volume/path``.
+                 - ``profile`` (str, optional): Databricks CLI profile name.
+                   Falls back to ``DATABRICKS_CONFIG_PROFILE`` env var.
+         """
+         super().__init__(**config)
+         self.url = config["url"]
+         self._profile = config.get("profile") or os.environ.get(
+             "DATABRICKS_CONFIG_PROFILE"
+         )
+         self._fs_instance: _DatabricksVolumesFS | None = None
+         self._fs_lock = threading.RLock()
+
+     @staticmethod
+     def _get_kwargs_from_urls(urlpath: str) -> dict:
+         """Extract constructor kwargs from a remote URL.
+
+         Called by DVC when parsing ``dvc remote add`` URLs. Returns the
+         URL as-is so it can be forwarded to ``__init__`` as ``config["url"]``.
+
+         Args:
+             urlpath: Full remote URL, e.g.
+                 ``dbvol:///Volumes/catalog/schema/volume/path``.
+
+         Returns:
+             Dict with a single ``url`` key.
+         """
+         return {"url": urlpath}
+
+     @classmethod
+     def _strip_protocol(cls, path: str | list[str]) -> str | list[str]:
+         """Remove the ``dbvol://`` prefix and ensure a leading slash.
+
+         Args:
+             path: Raw path, possibly prefixed with ``dbvol://``, or a list
+                 of such paths.
+
+         Returns:
+             Absolute path string starting with ``/``, or a list of such
+             strings when the input is a list.
+         """
+         if isinstance(path, list):
+             return [cls._strip_protocol(p) for p in path]
+
+         if path.startswith("dbvol://"):
+             path = path[len("dbvol://") :]
+
+         if not path.startswith("/"):
+             path = "/" + path
+
+         return path
+
+     def unstrip_protocol(self, path: str) -> str:
+         """Reconstruct the full ``dbvol://`` URL from an absolute path.
+
+         Args:
+             path: Absolute Volume path, e.g.
+                 ``/Volumes/catalog/schema/volume/file``.
+
+         Returns:
+             Full URL string, e.g.
+             ``dbvol:///Volumes/catalog/schema/volume/file``.
+         """
+         return f"dbvol://{path}"
+
+     @property
+     def fs(self) -> _DatabricksVolumesFS:
+         """Return the underlying fsspec filesystem, created lazily and cached.
+
+         Thread-safe: uses an ``RLock`` to ensure only one instance is created
+         even under concurrent access.
+
+         Returns:
+             A ``_DatabricksVolumesFS`` instance authenticated with the
+             configured Databricks profile.
+         """
+         with self._fs_lock:
+             if self._fs_instance is None:
+                 self._fs_instance = _DatabricksVolumesFS(profile=self._profile)
+
+             return self._fs_instance
+++ src/dvc_databricks_startup.pth
@@ -0,0 +1 @@
+ import dvc_databricks