gitstore 0.58.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gitstore/__init__.py +20 -0
- gitstore/_exclude.py +121 -0
- gitstore/_fileobj.py +143 -0
- gitstore/_fuse.py +202 -0
- gitstore/_glob.py +16 -0
- gitstore/_lock.py +76 -0
- gitstore/_objsize.py +107 -0
- gitstore/batch.py +184 -0
- gitstore/cli/__init__.py +6 -0
- gitstore/cli/_archive.py +382 -0
- gitstore/cli/_basic.py +1059 -0
- gitstore/cli/_cp.py +522 -0
- gitstore/cli/_helpers.py +610 -0
- gitstore/cli/_mirror.py +98 -0
- gitstore/cli/_mount.py +79 -0
- gitstore/cli/_notes.py +133 -0
- gitstore/cli/_refs.py +235 -0
- gitstore/cli/_serve.py +75 -0
- gitstore/cli/_sync.py +318 -0
- gitstore/cli/_watch.py +80 -0
- gitstore/cli/_web.py +414 -0
- gitstore/copy/__init__.py +56 -0
- gitstore/copy/_io.py +187 -0
- gitstore/copy/_ops.py +1079 -0
- gitstore/copy/_resolve.py +469 -0
- gitstore/copy/_types.py +207 -0
- gitstore/exceptions.py +9 -0
- gitstore/fs.py +1401 -0
- gitstore/mirror.py +303 -0
- gitstore/notes.py +451 -0
- gitstore/py.typed +0 -0
- gitstore/repo.py +598 -0
- gitstore/tree.py +353 -0
- gitstore-0.58.3.dist-info/METADATA +27 -0
- gitstore-0.58.3.dist-info/RECORD +38 -0
- gitstore-0.58.3.dist-info/WHEEL +4 -0
- gitstore-0.58.3.dist-info/entry_points.txt +2 -0
- gitstore-0.58.3.dist-info/licenses/LICENSE +191 -0
gitstore/__init__.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
from .repo import GitStore, RefDict, ReflogEntry, Signature
|
|
2
|
+
from .mirror import MirrorDiff, RefChange, resolve_credentials
|
|
3
|
+
from .notes import NoteDict, NoteNamespace, NotesBatch
|
|
4
|
+
from .fs import FS, StatResult, WriteEntry, retry_write
|
|
5
|
+
from .tree import BlobOid, GitError, WalkEntry
|
|
6
|
+
from .batch import Batch
|
|
7
|
+
from .exceptions import StaleSnapshotError
|
|
8
|
+
from ._exclude import ExcludeFilter
|
|
9
|
+
from .copy import ChangeReport, ChangeAction, ChangeActionKind, ChangeError, FileEntry, FileType, disk_glob
|
|
10
|
+
|
|
11
|
+
# Public API surface of the gitstore package, in re-export order.
__all__ = [
    "GitStore",
    "RefDict",
    "ReflogEntry",
    "Signature",
    "MirrorDiff",
    "RefChange",
    "resolve_credentials",
    "NoteDict",
    "NoteNamespace",
    "NotesBatch",
    "FS",
    "StatResult",
    "WriteEntry",
    "retry_write",
    "StaleSnapshotError",
    "Batch",
    "BlobOid",
    "GitError",
    "ChangeReport",
    "ChangeAction",
    "ChangeActionKind",
    "ChangeError",
    "ExcludeFilter",
    "FileEntry",
    "FileType",
    "disk_glob",
    "WalkEntry",
]
|
gitstore/_exclude.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
"""Exclude-filter support for disk→repo operations.
|
|
2
|
+
|
|
3
|
+
Combines ``--exclude`` patterns, ``--exclude-from`` files, and automatic
|
|
4
|
+
``.gitignore`` loading into a single predicate used by
|
|
5
|
+
``_walk_local_paths`` and ``_enum_disk_to_repo``.
|
|
6
|
+
|
|
7
|
+
Pattern syntax follows gitignore rules (implemented by
|
|
8
|
+
``dulwich.ignore.IgnoreFilter``).
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Sequence
|
|
15
|
+
|
|
16
|
+
from dulwich.ignore import IgnoreFilter, IgnoreFilterStack
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ExcludeFilter:
    """Gitignore-style exclude filter for disk-to-repo operations.

    Combines ``--exclude`` patterns, ``--exclude-from`` file, and automatic
    ``.gitignore`` loading into a single predicate.

    Args:
        patterns: Gitignore-style patterns to exclude.
        exclude_from: Path to a file containing exclude patterns.
        gitignore: Load ``.gitignore`` files from walked directories.
    """

    def __init__(
        self,
        *,
        patterns: Sequence[str] | None = None,
        exclude_from: str | None = None,
        gitignore: bool = False,
    ) -> None:
        # IgnoreFilter consumes bytes lines, like the content of a
        # .gitignore file.
        base_lines: list[bytes] = []
        for p in patterns or ():
            base_lines.append(p.encode("utf-8"))
        if exclude_from is not None:
            path = Path(exclude_from)
            for raw in path.read_bytes().splitlines():
                line = raw.strip()
                # Skip blanks and comment lines (gitignore file syntax).
                if line and not line.startswith(b"#"):
                    base_lines.append(line)
        self._base: IgnoreFilter | None = (
            IgnoreFilter(base_lines) if base_lines else None
        )
        self._gitignore = gitignore
        # {rel_dir: IgnoreFilter | None} — lazily loaded per directory;
        # None records "no .gitignore in this directory" so it is not
        # re-probed.
        self._dir_filters: dict[str, IgnoreFilter | None] = {}

    # ------------------------------------------------------------------
    @property
    def active(self) -> bool:
        """True if any filtering is configured."""
        return self._base is not None or self._gitignore

    # ------------------------------------------------------------------
    def is_excluded(self, rel_path: str, *, is_dir: bool = False) -> bool:
        """Check against base patterns only (for post-filtering)."""
        if self._base is None:
            return False
        # Directory-only patterns (trailing "/") match only when the path
        # is checked with a trailing slash.
        check = rel_path + "/" if is_dir else rel_path
        # is_ignored returns None when no rule matched; treat as included.
        return self._base.is_ignored(check) is True

    # ------------------------------------------------------------------
    def enter_directory(self, abs_dir: Path, rel_dir: str) -> None:
        """Load .gitignore from *abs_dir* if gitignore mode is on."""
        if not self._gitignore:
            return
        if rel_dir in self._dir_filters:
            return
        gi = abs_dir / ".gitignore"
        if gi.is_file():
            self._dir_filters[rel_dir] = IgnoreFilter.from_path(str(gi))
        else:
            # Cache the miss so the directory is only probed once.
            self._dir_filters[rel_dir] = None

    # ------------------------------------------------------------------
    def is_excluded_in_walk(
        self, rel_path: str, *, is_dir: bool = False,
    ) -> bool:
        """Check base patterns + loaded .gitignore hierarchy.

        Called during ``os.walk()`` after ``enter_directory`` has been
        invoked for every ancestor.
        """
        check = rel_path + "/" if is_dir else rel_path

        # Base patterns (--exclude / --exclude-from)
        if self._base is not None and self._base.is_ignored(check) is True:
            return True

        if not self._gitignore:
            return False

        # Auto-exclude .gitignore files themselves
        if not is_dir and rel_path.rsplit("/", 1)[-1] == ".gitignore":
            return True

        # Walk .gitignore filters from root → deepest ancestor.
        # Git semantics: last (deepest) matching rule wins.
        parts = rel_path.split("/")
        excluded = None
        for depth in range(len(parts)):
            if depth == 0:
                dir_key = ""
            else:
                dir_key = "/".join(parts[:depth])
            filt = self._dir_filters.get(dir_key)
            if filt is not None:
                # Path relative to this .gitignore's directory
                sub = "/".join(parts[depth:])
                sub_check = sub + "/" if is_dir else sub
                result = filt.is_ignored(sub_check)
                # None means this filter has no opinion; keep the
                # decision from a shallower (or base) level.
                if result is not None:
                    excluded = result

        return excluded is True
|
gitstore/_fileobj.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
"""File-like objects for gitstore."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import io
|
|
6
|
+
from typing import TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
if TYPE_CHECKING:
|
|
9
|
+
from .fs import FS
|
|
10
|
+
from .batch import Batch
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class ReadableFile:
    """Seekable, read-only file-like view over an in-memory bytes payload.

    Supports ``read``/``seek``/``tell`` plus use as a context manager;
    ``close()`` only marks the object closed.
    """

    def __init__(self, data: bytes):
        self._buf = io.BytesIO(data)
        self._closed = False

    def _ensure_open(self) -> None:
        # Same error the io module raises for closed-file operations.
        if self._closed:
            raise ValueError("I/O operation on closed file.")

    @property
    def closed(self) -> bool:
        return self._closed

    def readable(self) -> bool:
        return True

    def writable(self) -> bool:
        return False

    def seekable(self) -> bool:
        return True

    def read(self, size: int = -1) -> bytes:
        self._ensure_open()
        return self._buf.read(size)

    def seek(self, offset: int, whence: int = 0) -> int:
        self._ensure_open()
        return self._buf.seek(offset, whence)

    def tell(self) -> int:
        self._ensure_open()
        return self._buf.tell()

    def close(self) -> None:
        self._closed = True

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        return False
|
57
|
+
|
|
58
|
+
|
|
59
|
+
class WritableFile:
    """Writable file-like object that commits on close.

    Bytes accumulate in an in-memory buffer; nothing is persisted until
    ``close()``, which commits the whole payload via ``FS.write``.  The
    resulting new snapshot is then published on the public ``fs``
    attribute.  If the ``with`` body raises, the buffer is discarded and
    nothing is committed.
    """

    def __init__(self, fs: FS, path: str):
        self._fs = fs
        self._path = path
        self._buf = io.BytesIO()
        self._closed = False
        # New FS snapshot produced by close(); None until committed.
        self.fs: FS | None = None

    @property
    def closed(self) -> bool:
        return self._closed

    def readable(self) -> bool:
        return False

    def writable(self) -> bool:
        return True

    def seekable(self) -> bool:
        return False

    def write(self, data: bytes) -> int:
        if self._closed:
            raise ValueError("I/O operation on closed file.")
        return self._buf.write(data)

    def close(self) -> None:
        if self._closed:
            return
        # Single atomic commit of the accumulated buffer.
        self.fs = self._fs.write(self._path, self._buf.getvalue())
        self._closed = True

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            # Error in the with-body: mark closed without committing.
            self._closed = True
        else:
            self.close()
        return False
|
101
|
+
|
|
102
|
+
|
|
103
|
+
class BatchWritableFile:
    """Writable file-like object that stages to a batch on close.

    Bytes accumulate in an in-memory buffer; ``close()`` stages the whole
    payload into the owning :class:`Batch` via ``Batch.write``.  If the
    ``with`` body raises, the buffer is discarded and nothing is staged.
    """

    def __init__(self, batch: Batch, path: str):
        self._batch = batch
        self._path = path
        self._buf = io.BytesIO()
        self._closed = False

    @property
    def closed(self) -> bool:
        return self._closed

    def readable(self) -> bool:
        return False

    def writable(self) -> bool:
        return True

    def seekable(self) -> bool:
        return False

    def write(self, data: bytes) -> int:
        if self._closed:
            raise ValueError("I/O operation on closed file.")
        return self._buf.write(data)

    def close(self) -> None:
        if self._closed:
            return
        self._batch.write(self._path, self._buf.getvalue())
        self._closed = True

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            # Error in the with-body: mark closed without staging.
            self._closed = True
        else:
            self.close()
        return False
gitstore/_fuse.py
ADDED
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
"""Read-only FUSE mount for gitstore."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import errno
|
|
6
|
+
import os
|
|
7
|
+
import stat
|
|
8
|
+
import sys
|
|
9
|
+
import threading
|
|
10
|
+
from typing import TYPE_CHECKING
|
|
11
|
+
|
|
12
|
+
try:
|
|
13
|
+
import mfusepy
|
|
14
|
+
except OSError as _exc:
|
|
15
|
+
raise ImportError(f"FUSE support unavailable: {_exc}") from _exc
|
|
16
|
+
|
|
17
|
+
from .tree import (
|
|
18
|
+
GIT_FILEMODE_BLOB,
|
|
19
|
+
GIT_FILEMODE_BLOB_EXECUTABLE,
|
|
20
|
+
GIT_FILEMODE_LINK,
|
|
21
|
+
GIT_FILEMODE_TREE,
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
if TYPE_CHECKING:
|
|
25
|
+
from .fs import FS
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _git_mode_to_stat(git_mode: int) -> int:
    """Convert a git filemode to a POSIX stat mode."""
    # The four git modes are mutually exclusive, so check order is free.
    if git_mode == GIT_FILEMODE_LINK:
        return stat.S_IFLNK | 0o777
    if git_mode == GIT_FILEMODE_TREE:
        return stat.S_IFDIR | 0o755
    # Regular file: executables keep 755, everything else (including
    # unknown modes) is presented as an ordinary 644 file.
    perms = 0o755 if git_mode == GIT_FILEMODE_BLOB_EXECUTABLE else 0o644
    return stat.S_IFREG | perms
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _fuse_path(path: str) -> str | None:
|
|
41
|
+
"""Convert FUSE path (``/foo/bar``) to gitstore path (``foo/bar``).
|
|
42
|
+
|
|
43
|
+
Returns ``None`` for the root directory.
|
|
44
|
+
"""
|
|
45
|
+
stripped = path.lstrip("/")
|
|
46
|
+
return stripped if stripped else None
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class GitStoreOperations(mfusepy.Operations):
    """Read-only FUSE operations backed by a gitstore FS snapshot."""

    def __init__(self, fs: FS):
        self._fs = fs
        # FUSE may invoke callbacks from multiple threads; serialize all
        # access to the underlying snapshot.
        self._lock = threading.Lock()
        # Present the mounting user as the owner of every node.
        self._uid = os.getuid()
        self._gid = os.getgid()

    def getattr(self, path, fh=None):
        # Translate snapshot lookup failures into the matching FUSE errno.
        gs_path = _fuse_path(path)
        with self._lock:
            try:
                st = self._fs.stat(gs_path)
            except FileNotFoundError:
                raise mfusepy.FuseOSError(errno.ENOENT)
            except NotADirectoryError:
                raise mfusepy.FuseOSError(errno.ENOTDIR)

        return {
            "st_mode": _git_mode_to_stat(st.mode),
            "st_size": st.size,
            "st_nlink": st.nlink,
            "st_mtime": st.mtime,
            # Only one timestamp is available from the snapshot; reuse it
            # for atime/ctime as well.
            "st_atime": st.mtime,
            "st_ctime": st.mtime,
            "st_uid": self._uid,
            "st_gid": self._gid,
        }

    def readdir(self, path, fh):
        # Generator: yields the synthetic "." / ".." entries first, then
        # the directory contents from the snapshot.
        gs_path = _fuse_path(path)
        with self._lock:
            try:
                entries = self._fs.listdir(gs_path)
            except FileNotFoundError:
                raise mfusepy.FuseOSError(errno.ENOENT)
            except NotADirectoryError:
                raise mfusepy.FuseOSError(errno.ENOTDIR)
        yield "."
        yield ".."
        for entry in entries:
            yield entry.name

    def read(self, path, size, offset, fh):
        # Ranged read delegated straight to the snapshot.
        gs_path = _fuse_path(path)
        with self._lock:
            try:
                return self._fs.read(gs_path, offset=offset, size=size)
            except FileNotFoundError:
                raise mfusepy.FuseOSError(errno.ENOENT)
            except IsADirectoryError:
                raise mfusepy.FuseOSError(errno.EISDIR)

    def readlink(self, path):
        gs_path = _fuse_path(path)
        with self._lock:
            try:
                return self._fs.readlink(gs_path)
            except FileNotFoundError:
                raise mfusepy.FuseOSError(errno.ENOENT)
            except ValueError:
                # NOTE(review): FS.readlink apparently raises ValueError
                # for a non-symlink entry — confirm; mapped to EINVAL as
                # POSIX readlink(2) does.
                raise mfusepy.FuseOSError(errno.EINVAL)

    def open(self, path, flags):
        # Refuse any write access up front: this mount is read-only.
        accmode = flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)
        if accmode != os.O_RDONLY:
            raise mfusepy.FuseOSError(errno.EROFS)
        return 0

    def access(self, path, amode):
        if amode & os.W_OK:
            raise mfusepy.FuseOSError(errno.EROFS)
        gs_path = _fuse_path(path)
        with self._lock:
            # The root (gs_path is None) always exists.
            if gs_path is not None and not self._fs.exists(gs_path):
                raise mfusepy.FuseOSError(errno.ENOENT)
        return 0

    def statfs(self, path):
        # Synthetic values: the snapshot has no real block device behind
        # it, so sizes/counts are reported as zero.
        return {
            "f_bsize": 4096,
            "f_frsize": 4096,
            "f_blocks": 0,
            "f_bfree": 0,
            "f_bavail": 0,
            "f_files": 0,
            "f_ffree": 0,
            "f_favail": 0,
            "f_namemax": 255,
        }

    def utimens(self, path, times=None):
        # Timestamps cannot be changed on a read-only snapshot.
        raise mfusepy.FuseOSError(errno.EROFS)

    def destroy(self, path):
        # Called once at unmount; release the underlying snapshot.
        with self._lock:
            self._fs.close()

    # Disable write operations — mfusepy returns ENOSYS at kernel level for None
    chmod = None
    chown = None
    create = None
    link = None
    mkdir = None
    mknod = None
    rename = None
    rmdir = None
    symlink = None
    truncate = None
    unlink = None
    write = None
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def mount(
    fs: FS,
    mountpoint: str,
    *,
    foreground: bool = True,
    debug: bool = False,
    nothreads: bool = False,
    allow_other: bool = False,
) -> None:
    """Mount a gitstore FS snapshot as a read-only FUSE filesystem.

    Blocks until the filesystem is unmounted (Ctrl-C or ``umount``).

    Args:
        fs: Snapshot to expose.
        mountpoint: Directory to mount onto.
        foreground: Run in the foreground (do not daemonize).
        debug: Enable FUSE debug output.
        nothreads: Serve all requests on a single thread.
        allow_other: Permit access by users other than the mounter.
    """
    # Label shown in mount tables: ref name when set, else short hash.
    ref_label = fs.ref_name or fs.commit_hash[:12]
    ops = GitStoreOperations(fs)

    fuse_kwargs: dict = {
        "ro": True,
        # The snapshot is immutable, so the kernel may cache attributes
        # and directory entries aggressively (one hour).
        "attr_timeout": 3600,
        "entry_timeout": 3600,
        "fsname": f"gitstore:{ref_label}",
        "subtype": "gitstore",
    }

    if allow_other:
        fuse_kwargs["allow_other"] = True

    if sys.platform == "darwin":
        # macFUSE options: suppress ._* AppleDouble files and xattr
        # traffic, and give the volume a readable name in Finder.
        fuse_kwargs["noappledouble"] = True
        fuse_kwargs["noapplexattr"] = True
        fuse_kwargs["volname"] = f"gitstore ({ref_label})"

    # Blocks here until unmounted.
    mfusepy.FUSE(
        ops,
        mountpoint,
        foreground=foreground,
        debug=debug,
        nothreads=nothreads,
        **fuse_kwargs,
    )
|
gitstore/_glob.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
"""Shared dotfile-aware glob matching."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from fnmatch import fnmatch as _fnmatch
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def _glob_match(pattern: str, name: str) -> bool:
|
|
9
|
+
"""Match *name* against a glob *pattern* segment.
|
|
10
|
+
|
|
11
|
+
``*`` and ``?`` do not match a leading ``.`` unless the pattern itself
|
|
12
|
+
starts with ``.`` (Unix/rsync convention).
|
|
13
|
+
"""
|
|
14
|
+
if not pattern.startswith(".") and name.startswith("."):
|
|
15
|
+
return False
|
|
16
|
+
return _fnmatch(name, pattern)
|
gitstore/_lock.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
"""Advisory repo lock: serializes ref mutations across threads and processes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
import threading
|
|
7
|
+
from contextlib import contextmanager
|
|
8
|
+
|
|
9
|
+
# Per-process threading locks, keyed by resolved repo path
|
|
10
|
+
_thread_locks: dict[tuple[int, int] | str, threading.Lock] = {}
|
|
11
|
+
_thread_locks_guard = threading.Lock()
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _get_thread_lock(repo_path: str) -> threading.Lock:
|
|
15
|
+
real = os.path.realpath(repo_path)
|
|
16
|
+
try:
|
|
17
|
+
st = os.stat(real)
|
|
18
|
+
key: tuple[int, int] | str = (st.st_dev, st.st_ino)
|
|
19
|
+
if st.st_ino == 0:
|
|
20
|
+
key = os.path.normcase(real)
|
|
21
|
+
except OSError:
|
|
22
|
+
key = os.path.normcase(real)
|
|
23
|
+
with _thread_locks_guard:
|
|
24
|
+
if key not in _thread_locks:
|
|
25
|
+
_thread_locks[key] = threading.Lock()
|
|
26
|
+
return _thread_locks[key]
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
try:
    # POSIX: use flock(2) advisory locking.
    import fcntl

    def _lock_path(repo_path: str) -> str:
        # A repo directory gets the lock file inside it; a plain file
        # path gets a sibling "<path>.lock".
        if os.path.isdir(repo_path):
            return os.path.join(repo_path, "gitstore.lock")
        return repo_path + ".lock"

    @contextmanager
    def repo_lock(repo_path: str):
        # Acquire the in-process thread lock first — flock does not
        # serialize threads of the same process — then the OS file lock
        # for cross-process exclusion.
        tlock = _get_thread_lock(repo_path)
        tlock.acquire()
        try:
            lock_path = _lock_path(repo_path)
            # O_CLOEXEC where available so child processes don't inherit
            # the lock fd.
            fd = os.open(lock_path, os.O_CREAT | os.O_RDWR | getattr(os, "O_CLOEXEC", 0))
            try:
                fcntl.flock(fd, fcntl.LOCK_EX)
                yield
            finally:
                fcntl.flock(fd, fcntl.LOCK_UN)
                os.close(fd)
        finally:
            tlock.release()

except ImportError:
    # Windows: no fcntl; use msvcrt byte-range locking instead.
    import msvcrt

    def _lock_path(repo_path: str) -> str:
        # Same layout as the POSIX variant.
        if os.path.isdir(repo_path):
            return os.path.join(repo_path, "gitstore.lock")
        return repo_path + ".lock"

    @contextmanager
    def repo_lock(repo_path: str):
        # Thread lock first (see POSIX variant), then the OS lock.
        tlock = _get_thread_lock(repo_path)
        tlock.acquire()
        try:
            lock_path = _lock_path(repo_path)
            fd = os.open(lock_path, os.O_CREAT | os.O_RDWR)
            # Keep the fd out of child processes.
            os.set_inheritable(fd, False)
            try:
                # Lock one byte at offset 0; LK_LOCK blocks (with retries)
                # until acquired.
                msvcrt.locking(fd, msvcrt.LK_LOCK, 1)
                yield
            finally:
                msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
                os.close(fd)
        finally:
            tlock.release()
|
gitstore/_objsize.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
"""Efficient object size queries without loading full content.
|
|
2
|
+
|
|
3
|
+
For non-delta packed objects, reads only the pack entry header.
|
|
4
|
+
For loose objects, decompresses only the object header.
|
|
5
|
+
For delta objects, falls back to full resolution via dulwich.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import os
|
|
11
|
+
import zlib
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class ObjectSizer:
    """Batch-efficient object size lookup.

    Usage::

        with ObjectSizer(dulwich_object_store) as sizer:
            size = sizer.size(sha_hex)

    *sha_hex* is a 40-char hex bytes SHA (dulwich native format).
    """

    __slots__ = ("_store", "_pack_fds", "_pack_index")

    def __init__(self, object_store):
        self._store = object_store
        # Open pack-file handles, keyed by filename; kept until close().
        self._pack_fds: dict[str, object] = {}
        # raw 20-byte sha -> (pack filename, byte offset); built lazily
        # on the first size() call.
        self._pack_index: dict[bytes, tuple[str, int]] | None = None

    # -- public API ----------------------------------------------------------

    def size(self, sha_hex: bytes) -> int:
        """Return the decompressed size of the object."""
        if self._pack_index is None:
            self._build_pack_index()

        # Accepts bytes or str hex; normalize to the raw 20-byte digest.
        sha_raw = bytes.fromhex(
            sha_hex.decode() if isinstance(sha_hex, bytes) else sha_hex
        )

        entry = self._pack_index.get(sha_raw)  # type: ignore[union-attr]
        if entry is not None:
            fname, offset = entry
            obj_type, obj_size = self._read_pack_header(fname, offset)
            if obj_type <= 4:  # commit=1, tree=2, blob=3, tag=4
                # Non-delta entry: the header size IS the object size.
                return obj_size
            # OFS_DELTA=6, REF_DELTA=7 — need full decompression
            return self._store[sha_hex].raw_length()

        # Loose object
        return self._read_loose_header(sha_hex)

    # -- internals -----------------------------------------------------------

    def _build_pack_index(self):
        # One flat dict across all packs; a later pack simply overwrites
        # an earlier entry for a duplicated SHA (same object either way).
        self._pack_index = {}
        for pack in self._store.packs:
            fname = pack.data._filename
            for sha_raw, offset, _crc32 in pack.index.iterentries():
                self._pack_index[sha_raw] = (fname, offset)

    def _read_pack_header(self, filename: str, offset: int) -> tuple[int, int]:
        """Read type and decompressed size from a pack entry header."""
        # Reuse an already-open handle for this pack when possible.
        f = self._pack_fds.get(filename)
        if f is None:
            f = open(filename, "rb")
            self._pack_fds[filename] = f

        # Pack entry header: first byte carries the object type in bits
        # 4-6 and the low 4 size bits; each continuation byte (MSB set)
        # contributes 7 more size bits, little-endian.
        f.seek(offset)
        byte = f.read(1)[0]
        obj_type = (byte >> 4) & 0x07
        size = byte & 0x0F
        shift = 4
        while byte & 0x80:
            byte = f.read(1)[0]
            size |= (byte & 0x7F) << shift
            shift += 7
        return obj_type, size

    def _read_loose_header(self, sha_hex: bytes) -> int:
        """Read size from a loose object header."""
        # Only disk-backed stores expose a .path with the xx/xxxxx...
        # fan-out layout; otherwise load the object fully via dulwich.
        if not hasattr(self._store, 'path') or not os.path.isdir(self._store.path):
            return self._store[sha_hex].raw_length()
        h = sha_hex.decode() if isinstance(sha_hex, bytes) else sha_hex
        path = os.path.join(self._store.path, h[:2], h[2:])
        with open(path, "rb") as f:
            # A loose object is a zlib stream beginning with
            # b"<type> <size>\x00"; 64 compressed bytes (decompressed to
            # at most 256) are ample to cover that header.
            d = zlib.decompressobj()
            header = d.decompress(f.read(64), 256)
            nul = header.index(b"\x00")
            _, size_str = header[:nul].split(b" ", 1)
            return int(size_str)

    # -- context manager -----------------------------------------------------

    def close(self):
        # Close cached pack handles and drop the lazily built index.
        for fd in self._pack_fds.values():
            fd.close()
        self._pack_fds.clear()
        self._pack_index = None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()
|