python-discovery 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- python_discovery/__init__.py +22 -0
- python_discovery/_cache.py +153 -0
- python_discovery/_cached_py_info.py +259 -0
- python_discovery/_compat.py +29 -0
- python_discovery/_discovery.py +308 -0
- python_discovery/_py_info.py +726 -0
- python_discovery/_py_spec.py +235 -0
- python_discovery/_specifier.py +264 -0
- python_discovery/_windows/__init__.py +13 -0
- python_discovery/_windows/_pep514.py +222 -0
- python_discovery/_windows/_propose.py +53 -0
- python_discovery/py.typed +0 -0
- python_discovery-1.0.0.dist-info/METADATA +71 -0
- python_discovery-1.0.0.dist-info/RECORD +16 -0
- python_discovery-1.0.0.dist-info/WHEEL +4 -0
- python_discovery-1.0.0.dist-info/licenses/LICENSE +18 -0
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""Self-contained Python interpreter discovery."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from importlib.metadata import version
|
|
6
|
+
|
|
7
|
+
from ._cache import ContentStore, DiskCache, PyInfoCache
|
|
8
|
+
from ._discovery import get_interpreter
|
|
9
|
+
from ._py_info import PythonInfo
|
|
10
|
+
from ._py_spec import PythonSpec
|
|
11
|
+
|
|
12
|
+
__version__ = version("python-discovery")
|
|
13
|
+
|
|
14
|
+
__all__ = [
|
|
15
|
+
"ContentStore",
|
|
16
|
+
"DiskCache",
|
|
17
|
+
"PyInfoCache",
|
|
18
|
+
"PythonInfo",
|
|
19
|
+
"PythonSpec",
|
|
20
|
+
"__version__",
|
|
21
|
+
"get_interpreter",
|
|
22
|
+
]
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
"""Cache Protocol and built-in implementations for Python interpreter discovery."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
from contextlib import contextmanager, suppress
|
|
8
|
+
from hashlib import sha256
|
|
9
|
+
from typing import TYPE_CHECKING, Final, Protocol, runtime_checkable
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from collections.abc import Generator
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
|
|
15
|
+
_LOGGER: Final[logging.Logger] = logging.getLogger(__name__)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@runtime_checkable
class ContentStore(Protocol):
    """A store for reading and writing cached content.

    Implementations persist a single JSON-serializable dict; see
    DiskContentStore and NoOpContentStore in this module.
    """

    def exists(self) -> bool:
        """Return True when a cached payload is currently present."""
        ...

    def read(self) -> dict | None:
        """Return the cached payload, or None when missing or unreadable."""
        ...

    def write(self, content: dict) -> None:
        """Persist *content*, replacing any previous payload."""
        ...

    def remove(self) -> None:
        """Remove the cached payload (best effort)."""
        ...

    @contextmanager
    def locked(self) -> Generator[None]:
        """Hold a lock guarding this store for the duration of the context."""
        ...
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@runtime_checkable
class PyInfoCache(Protocol):
    """Cache interface for Python interpreter information."""

    def py_info(self, path: Path) -> ContentStore:
        """Return the content store holding cached info for the interpreter at *path*."""
        ...

    def py_info_clear(self) -> None:
        """Remove every cached interpreter info entry."""
        ...
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class DiskContentStore:
    """Content store backed by a JSON file, guarded by an inter-process file lock."""

    def __init__(self, folder: Path, key: str) -> None:
        self._folder = folder
        self._key = key

    @property
    def _file(self) -> Path:
        # One JSON document per key inside the store folder.
        return self._folder / f"{self._key}.json"

    def exists(self) -> bool:
        """Report whether a payload file is present on disk."""
        return self._file.exists()

    def read(self) -> dict | None:
        """Load the payload; a corrupt file is deleted and treated as a miss."""
        try:
            payload = json.loads(self._file.read_text(encoding="utf-8"))
        except ValueError:
            # Malformed JSON: drop the entry (best effort) and report a miss.
            with suppress(OSError):
                self.remove()
            return None
        except OSError:
            _LOGGER.debug("failed to read %s", self._file, exc_info=True)
            return None
        _LOGGER.debug("got python info from %s", self._file)
        return payload

    def write(self, content: dict) -> None:
        """Serialize *content* to the payload file, creating the folder if needed."""
        self._folder.mkdir(parents=True, exist_ok=True)
        serialized = json.dumps(content, sort_keys=True, indent=2)
        self._file.write_text(serialized, encoding="utf-8")
        _LOGGER.debug("wrote python info at %s", self._file)

    def remove(self) -> None:
        """Delete the payload file, ignoring filesystem errors."""
        with suppress(OSError):
            self._file.unlink()
            _LOGGER.debug("removed python info at %s", self._file)

    @contextmanager
    def locked(self) -> Generator[None]:
        """Serialize access across processes via a sibling ``.lock`` file."""
        from filelock import FileLock  # noqa: PLC0415

        lock_file = self._folder / f"{self._key}.lock"
        lock_file.parent.mkdir(parents=True, exist_ok=True)
        with FileLock(str(lock_file)):
            yield
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class DiskCache:
    """File-system based Python interpreter info cache (``<root>/py_info/4/<sha256>.json``)."""

    def __init__(self, root: Path) -> None:
        self._root = root

    @property
    def _py_info_dir(self) -> Path:
        # The "4" segment versions the cache layout.
        return self._root / "py_info" / "4"

    def py_info(self, path: Path) -> DiskContentStore:
        """Return the store for *path*, keyed by the SHA-256 of its text form."""
        digest = sha256(str(path).encode("utf-8")).hexdigest()
        return DiskContentStore(self._py_info_dir, digest)

    def py_info_clear(self) -> None:
        """Delete every ``.json`` entry in the cache directory, ignoring FS errors."""
        folder = self._py_info_dir
        if not folder.exists():
            return
        for candidate in folder.iterdir():
            if candidate.suffix != ".json":
                continue
            with suppress(OSError):
                candidate.unlink()
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
class NoOpContentStore(ContentStore):
    """ContentStore that never persists anything and always reports a miss."""

    def exists(self) -> bool:  # noqa: PLR6301
        """Always a miss."""
        return False

    def read(self) -> dict | None:  # noqa: PLR6301
        """Nothing is ever stored, so nothing can be read."""
        return None

    def write(self, content: dict) -> None:
        """Discard *content*."""
        return None

    def remove(self) -> None:
        """Nothing to remove."""
        return None

    @contextmanager
    def locked(self) -> Generator[None]:  # noqa: PLR6301
        """No locking required for a store with no state."""
        yield
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
class NoOpCache(PyInfoCache):
    """PyInfoCache whose lookups always yield a store that never hits."""

    def py_info(self, path: Path) -> NoOpContentStore:  # noqa: ARG002, PLR6301
        """Hand back a fresh no-op store regardless of *path*."""
        return NoOpContentStore()

    def py_info_clear(self) -> None:
        """Nothing is cached, so nothing to clear."""
        return None
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
__all__ = [
|
|
147
|
+
"ContentStore",
|
|
148
|
+
"DiskCache",
|
|
149
|
+
"DiskContentStore",
|
|
150
|
+
"NoOpCache",
|
|
151
|
+
"NoOpContentStore",
|
|
152
|
+
"PyInfoCache",
|
|
153
|
+
]
|
|
@@ -0,0 +1,259 @@
|
|
|
1
|
+
"""Acquire Python information via subprocess interrogation with multi-level caching."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import hashlib
|
|
6
|
+
import json
|
|
7
|
+
import logging
|
|
8
|
+
import os
|
|
9
|
+
import pkgutil
|
|
10
|
+
import secrets
|
|
11
|
+
import subprocess # noqa: S404
|
|
12
|
+
import sys
|
|
13
|
+
import tempfile
|
|
14
|
+
from collections import OrderedDict
|
|
15
|
+
from contextlib import contextmanager
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
from shlex import quote
|
|
18
|
+
from subprocess import Popen # noqa: S404
|
|
19
|
+
from typing import TYPE_CHECKING, Final
|
|
20
|
+
|
|
21
|
+
from ._cache import NoOpCache
|
|
22
|
+
from ._py_info import PythonInfo
|
|
23
|
+
|
|
24
|
+
if TYPE_CHECKING:
|
|
25
|
+
from collections.abc import Generator, Mapping
|
|
26
|
+
|
|
27
|
+
from ._cache import ContentStore, PyInfoCache
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
# In-process memoization of interrogation outcomes keyed by executable path.
# Exceptions are cached as values too, so a failing interpreter is not re-queried.
_CACHE: OrderedDict[Path, PythonInfo | Exception] = OrderedDict()
# Seed with the currently running interpreter: it can describe itself without a subprocess.
_CACHE[Path(sys.executable)] = PythonInfo()
_LOGGER: Final[logging.Logger] = logging.getLogger(__name__)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def from_exe(  # noqa: PLR0913
    cls: type[PythonInfo],
    cache: PyInfoCache | None,
    exe: str,
    env: Mapping[str, str] | None = None,
    *,
    raise_on_error: bool = True,
    ignore_cache: bool = False,
) -> PythonInfo | None:
    """Return interpreter information for *exe*.

    A cached failure either re-raises (``raise_on_error=True``) or is logged
    and reported as ``None``.
    """
    effective_env = os.environ if env is None else env
    outcome = _get_from_cache(cls, cache, exe, effective_env, ignore_cache=ignore_cache)
    if not isinstance(outcome, Exception):
        return outcome
    if raise_on_error:
        raise outcome
    _LOGGER.info("%s", outcome)
    return None
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def _get_from_cache(
    cls: type[PythonInfo],
    cache: PyInfoCache | None,
    exe: str,
    env: Mapping[str, str],
    *,
    ignore_cache: bool = False,
) -> PythonInfo | Exception:
    """Look up *exe* in the in-memory cache, falling back to the file cache.

    The outcome (a ``PythonInfo`` or the ``Exception`` describing the failure)
    is memoized in ``_CACHE`` so repeated queries for the same path do no work.

    Fix: the default for ``ignore_cache`` was ``True`` while the public entry
    point ``from_exe`` defaults to ``False``; aligned to ``False`` so a caller
    omitting the keyword does not silently bypass the in-memory cache.
    """
    exe_path = Path(exe)
    if not ignore_cache and exe_path in _CACHE:
        result = _CACHE[exe_path]
    else:
        # Delegate to the (possibly no-op) persistent cache / subprocess interrogation.
        result = _CACHE[exe_path] = _get_via_file_cache(cls, cache, exe_path, exe, env)
    if isinstance(result, PythonInfo):
        # Record the exact spelling the caller used to reach this interpreter.
        result.executable = exe
    return result
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def _get_via_file_cache(
    cls: type[PythonInfo],
    cache: PyInfoCache | None,
    path: Path,
    exe: str,
    env: Mapping[str, str],
) -> PythonInfo | Exception:
    """Resolve interpreter info for *path* via the persistent cache, interrogating on a miss.

    A cache entry is only trusted when its recorded path, mtime, and the hash of
    the ``_py_info.py`` interrogation script all match the current state; stale
    entries are removed.  Returns the failure as a value instead of raising.
    """
    path_text = str(path)
    try:
        path_modified = path.stat().st_mtime
    except OSError:
        # Executable not statable: use a sentinel so any cached mtime mismatches.
        path_modified = -1
    py_info_script = Path(Path(__file__).resolve()).parent / "_py_info.py"
    try:
        # Hash the interrogation script so cache entries are invalidated when it changes.
        py_info_hash: str | None = hashlib.sha256(py_info_script.read_bytes()).hexdigest()
    except OSError:
        py_info_hash = None

    resolved_cache = cache if cache is not None else NoOpCache()
    py_info: PythonInfo | None = None
    py_info_store = resolved_cache.py_info(path)
    # Lock around the read-validate-interrogate-write sequence so concurrent
    # processes do not interrogate or write the same entry at the same time.
    with py_info_store.locked():
        if py_info_store.exists() and (data := py_info_store.read()) is not None:
            of_path, of_st_mtime = data.get("path"), data.get("st_mtime")
            of_content, of_hash = data.get("content"), data.get("hash")
            if (
                of_path == path_text
                and of_st_mtime == path_modified
                and of_hash == py_info_hash
                and isinstance(of_content, dict)
            ):
                py_info = _load_cached_py_info(cls, py_info_store, of_content)
            else:
                # Entry belongs to a different executable state: discard it.
                py_info_store.remove()
        if py_info is None:
            failure, py_info = _run_subprocess(cls, exe, env)
            if failure is not None:
                # One retry: transient failures (e.g. racing filesystem churn) are common.
                _LOGGER.debug("first subprocess attempt failed for %s (%s), retrying", exe, failure)
                failure, py_info = _run_subprocess(cls, exe, env)
                if failure is not None:
                    return failure
            if py_info is not None:
                # Persist alongside the validation metadata used above.
                py_info_store.write({
                    "st_mtime": path_modified,
                    "path": path_text,
                    "content": py_info.to_dict(),
                    "hash": py_info_hash,
                })
    if py_info is None:
        msg = f"{exe} failed to produce interpreter info"
        return RuntimeError(msg)
    return py_info
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def _load_cached_py_info(
|
|
128
|
+
cls: type[PythonInfo],
|
|
129
|
+
py_info_store: ContentStore,
|
|
130
|
+
content: dict,
|
|
131
|
+
) -> PythonInfo | None:
|
|
132
|
+
try:
|
|
133
|
+
py_info = cls.from_dict(content.copy())
|
|
134
|
+
except (KeyError, TypeError):
|
|
135
|
+
py_info_store.remove()
|
|
136
|
+
return None
|
|
137
|
+
if (sys_exe := py_info.system_executable) is not None and not Path(sys_exe).exists():
|
|
138
|
+
py_info_store.remove()
|
|
139
|
+
return None
|
|
140
|
+
return py_info
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
COOKIE_LENGTH: Final[int] = 32


def gen_cookie() -> str:
    """Return a random hex marker exactly ``COOKIE_LENGTH`` characters long."""
    # token_hex(n) produces 2*n hex characters, hence the halving.
    return secrets.token_hex(COOKIE_LENGTH // 2)
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
@contextmanager
def _resolve_py_info_script() -> Generator[Path]:
    """Yield a filesystem path to ``_py_info.py``, materializing it when needed.

    When the package is installed as regular files the sibling script is used
    directly; otherwise (e.g. zipimport) its bytes are extracted to a temporary
    file that is deleted on exit.

    Raises FileNotFoundError when the script cannot be located either way.

    Fixes over the original: the file descriptor from ``mkstemp`` is now closed
    even when ``os.write`` fails (it previously leaked), and cleanup uses
    ``unlink(missing_ok=True)`` so a missing temp file in ``finally`` cannot
    mask the real exception.
    """
    py_info_script = Path(Path(__file__).resolve()).parent / "_py_info.py"
    if py_info_script.is_file():
        yield py_info_script
        return
    data = pkgutil.get_data(__package__ or __name__, "_py_info.py")
    if data is None:
        msg = "cannot locate _py_info.py for subprocess interrogation"
        raise FileNotFoundError(msg)
    fd, tmp = tempfile.mkstemp(suffix=".py")
    tmp_path = Path(tmp)
    try:
        try:
            os.write(fd, data)
        finally:
            os.close(fd)  # always release the descriptor, even on a failed write
        yield tmp_path
    finally:
        tmp_path.unlink(missing_ok=True)
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def _extract_between_cookies(out: str, start_cookie: str, end_cookie: str) -> tuple[str, str, int, int]:
|
|
170
|
+
"""Extract payload between reversed cookie markers, forwarding any surrounding output to stdout."""
|
|
171
|
+
raw_out = out
|
|
172
|
+
out_starts = out.find(start_cookie[::-1])
|
|
173
|
+
if out_starts > -1:
|
|
174
|
+
if pre_cookie := out[:out_starts]:
|
|
175
|
+
sys.stdout.write(pre_cookie)
|
|
176
|
+
out = out[out_starts + COOKIE_LENGTH :]
|
|
177
|
+
out_ends = out.find(end_cookie[::-1])
|
|
178
|
+
if out_ends > -1:
|
|
179
|
+
if post_cookie := out[out_ends + COOKIE_LENGTH :]:
|
|
180
|
+
sys.stdout.write(post_cookie)
|
|
181
|
+
out = out[:out_ends]
|
|
182
|
+
return out, raw_out, out_starts, out_ends
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
def _run_subprocess(
    cls: type[PythonInfo],
    exe: str,
    env: Mapping[str, str],
) -> tuple[Exception | None, PythonInfo | None]:
    """Interrogate *exe* by running the ``_py_info.py`` script in a subprocess.

    Returns ``(failure, info)`` — exactly one of the two is not None.  The
    payload is delimited by random cookies so unrelated interpreter output
    (site hooks, warnings) cannot corrupt the JSON parse.
    """
    start_cookie = gen_cookie()
    end_cookie = gen_cookie()
    with _resolve_py_info_script() as py_info_script:
        cmd = [exe, str(py_info_script), start_cookie, end_cookie]
        # Copy so the caller's mapping is never mutated.
        env = dict(env)
        # Drop the macOS framework-build launcher variable: it would redirect
        # the child to a different executable than the one being queried.
        env.pop("__PYVENV_LAUNCHER__", None)
        env["PYTHONUTF8"] = "1"
        _LOGGER.debug("get interpreter info via cmd: %s", LogCmd(cmd))
        try:
            process = Popen(  # noqa: S603
                cmd,
                universal_newlines=True,
                stdin=subprocess.PIPE,
                stderr=subprocess.PIPE,
                stdout=subprocess.PIPE,
                env=env,
                encoding="utf-8",
                errors="backslashreplace",
            )
            out, err = process.communicate()
            code = process.returncode
        except OSError as os_error:
            # Spawn failure (missing exe, permission): report via the same channel.
            out, err, code = "", os_error.strerror, os_error.errno
    if code != 0:
        msg = f"{exe} with code {code}{f' out: {out!r}' if out else ''}{f' err: {err!r}' if err else ''}"
        return RuntimeError(f"failed to query {msg}"), None
    out, raw_out, out_starts, out_ends = _extract_between_cookies(out, start_cookie, end_cookie)
    try:
        result = cls.from_json(out)
        # Keep the exact executable spelling the caller used.
        result.executable = exe
    except json.JSONDecodeError as exc:
        # Log enough forensic detail to diagnose which part went missing.
        _LOGGER.warning(
            "subprocess %s returned invalid JSON; raw stdout %d chars, start cookie %s, end cookie %s, "
            "parsed output %d chars: %r",
            exe,
            len(raw_out),
            "found" if out_starts > -1 else "missing",
            "found" if out_ends > -1 else "missing",
            len(out),
            out[:200] if out else "<empty>",
        )
        msg = f"{exe} returned invalid JSON (exit code {code}){f', stderr: {err!r}' if err else ''}"
        failure = RuntimeError(msg)
        failure.__cause__ = exc
        return failure, None
    return None, result
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
class LogCmd:
    """Lazily renders a command line (shell-quoted) for debug logging."""

    def __init__(self, cmd: list[str], env: Mapping[str, str] | None = None) -> None:
        self.cmd = cmd
        self.env = env

    def __repr__(self) -> str:
        rendered = " ".join(quote(str(part)) for part in self.cmd)
        if self.env is None:
            return rendered
        return f"{rendered} env of {self.env!r}"
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def clear(cache: PyInfoCache) -> None:
    """Drop all cached interpreter information: the persistent *cache* and the in-memory map."""
    cache.py_info_clear()
    _CACHE.clear()
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
__all__ = [
|
|
256
|
+
"LogCmd",
|
|
257
|
+
"clear",
|
|
258
|
+
"from_exe",
|
|
259
|
+
]
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"""Platform compatibility utilities for Python discovery."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import functools
|
|
6
|
+
import logging
|
|
7
|
+
import pathlib
|
|
8
|
+
import tempfile
|
|
9
|
+
from typing import Final
|
|
10
|
+
|
|
11
|
+
_LOGGER: Final[logging.Logger] = logging.getLogger(__name__)


@functools.lru_cache(maxsize=1)
def fs_is_case_sensitive() -> bool:
    """Report whether the filesystem distinguishes case (probed once, then cached)."""
    with tempfile.NamedTemporaryFile(prefix="TmP") as probe:
        # The temp name starts with mixed-case "TmP": if its lowercased spelling
        # does not resolve, the filesystem is case-sensitive.
        sensitive = not pathlib.Path(probe.name.lower()).exists()
        _LOGGER.debug("filesystem is %scase-sensitive", "" if sensitive else "not ")
    return sensitive


def fs_path_id(path: str) -> str:
    """Normalize *path* for identity comparison on case-insensitive filesystems."""
    if fs_is_case_sensitive():
        return path
    return path.casefold()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
__all__ = [
|
|
27
|
+
"fs_is_case_sensitive",
|
|
28
|
+
"fs_path_id",
|
|
29
|
+
]
|