datamodel-code-generator 0.11.12__py3-none-any.whl → 0.45.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. datamodel_code_generator/__init__.py +654 -185
  2. datamodel_code_generator/__main__.py +872 -388
  3. datamodel_code_generator/arguments.py +798 -0
  4. datamodel_code_generator/cli_options.py +295 -0
  5. datamodel_code_generator/format.py +292 -54
  6. datamodel_code_generator/http.py +85 -10
  7. datamodel_code_generator/imports.py +152 -43
  8. datamodel_code_generator/model/__init__.py +138 -1
  9. datamodel_code_generator/model/base.py +531 -120
  10. datamodel_code_generator/model/dataclass.py +211 -0
  11. datamodel_code_generator/model/enum.py +133 -12
  12. datamodel_code_generator/model/imports.py +22 -0
  13. datamodel_code_generator/model/msgspec.py +462 -0
  14. datamodel_code_generator/model/pydantic/__init__.py +30 -25
  15. datamodel_code_generator/model/pydantic/base_model.py +304 -100
  16. datamodel_code_generator/model/pydantic/custom_root_type.py +11 -2
  17. datamodel_code_generator/model/pydantic/dataclass.py +15 -4
  18. datamodel_code_generator/model/pydantic/imports.py +40 -27
  19. datamodel_code_generator/model/pydantic/types.py +188 -96
  20. datamodel_code_generator/model/pydantic_v2/__init__.py +51 -0
  21. datamodel_code_generator/model/pydantic_v2/base_model.py +268 -0
  22. datamodel_code_generator/model/pydantic_v2/imports.py +15 -0
  23. datamodel_code_generator/model/pydantic_v2/root_model.py +35 -0
  24. datamodel_code_generator/model/pydantic_v2/types.py +143 -0
  25. datamodel_code_generator/model/scalar.py +124 -0
  26. datamodel_code_generator/model/template/Enum.jinja2 +15 -2
  27. datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2 +6 -0
  28. datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2 +6 -0
  29. datamodel_code_generator/model/template/ScalarTypeStatement.jinja2 +6 -0
  30. datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2 +20 -0
  31. datamodel_code_generator/model/template/TypeAliasType.jinja2 +20 -0
  32. datamodel_code_generator/model/template/TypeStatement.jinja2 +20 -0
  33. datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
  34. datamodel_code_generator/model/template/TypedDictClass.jinja2 +25 -0
  35. datamodel_code_generator/model/template/TypedDictFunction.jinja2 +24 -0
  36. datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2 +10 -0
  37. datamodel_code_generator/model/template/UnionTypeAliasType.jinja2 +10 -0
  38. datamodel_code_generator/model/template/UnionTypeStatement.jinja2 +10 -0
  39. datamodel_code_generator/model/template/dataclass.jinja2 +50 -0
  40. datamodel_code_generator/model/template/msgspec.jinja2 +55 -0
  41. datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +17 -4
  42. datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +12 -4
  43. datamodel_code_generator/model/template/pydantic/Config.jinja2 +1 -1
  44. datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +15 -2
  45. datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +57 -0
  46. datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +5 -0
  47. datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +48 -0
  48. datamodel_code_generator/model/type_alias.py +70 -0
  49. datamodel_code_generator/model/typed_dict.py +161 -0
  50. datamodel_code_generator/model/types.py +106 -0
  51. datamodel_code_generator/model/union.py +105 -0
  52. datamodel_code_generator/parser/__init__.py +30 -12
  53. datamodel_code_generator/parser/_graph.py +67 -0
  54. datamodel_code_generator/parser/_scc.py +171 -0
  55. datamodel_code_generator/parser/base.py +2426 -380
  56. datamodel_code_generator/parser/graphql.py +652 -0
  57. datamodel_code_generator/parser/jsonschema.py +2518 -647
  58. datamodel_code_generator/parser/openapi.py +631 -222
  59. datamodel_code_generator/py.typed +0 -0
  60. datamodel_code_generator/pydantic_patch.py +28 -0
  61. datamodel_code_generator/reference.py +672 -290
  62. datamodel_code_generator/types.py +521 -145
  63. datamodel_code_generator/util.py +155 -0
  64. datamodel_code_generator/watch.py +65 -0
  65. datamodel_code_generator-0.45.0.dist-info/METADATA +301 -0
  66. datamodel_code_generator-0.45.0.dist-info/RECORD +69 -0
  67. {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info}/WHEEL +1 -1
  68. datamodel_code_generator-0.45.0.dist-info/entry_points.txt +2 -0
  69. datamodel_code_generator/version.py +0 -1
  70. datamodel_code_generator-0.11.12.dist-info/METADATA +0 -440
  71. datamodel_code_generator-0.11.12.dist-info/RECORD +0 -31
  72. datamodel_code_generator-0.11.12.dist-info/entry_points.txt +0 -3
  73. {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info/licenses}/LICENSE +0 -0
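The file list above shows the headline additions of the 0.45.x line: pydantic v2, msgspec.Struct, dataclasses, and TypedDict output models, plus a GraphQL parser. A minimal Python sketch of selecting one of the new backends through the library's generate() helper follows; the parameter and enum member names assume the API documented for recent releases, and the input path is hypothetical.

from pathlib import Path

from datamodel_code_generator import DataModelType, InputFileType, generate

# Hypothetical schema path; any OpenAPI or JSON Schema document works.
generate(
    Path("api.yaml"),
    input_file_type=InputFileType.OpenAPI,
    output=Path("models.py"),
    # The pydantic_v2 / msgspec / dataclasses / TypedDict backends correspond to the
    # new model modules listed above; member name assumes the documented DataModelType enum.
    output_model_type=DataModelType.PydanticV2BaseModel,
)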
datamodel_code_generator/format.py
@@ -1,111 +1,349 @@
+ """Code formatting utilities and Python version handling.
+
+ Provides CodeFormatter for applying black, isort, and ruff formatting,
+ along with PythonVersion enum and DatetimeClassType for output configuration.
+ """
+
+ from __future__ import annotations
+
+ import subprocess  # noqa: S404
  from enum import Enum
+ from functools import cached_property, lru_cache
+ from importlib import import_module
  from pathlib import Path
- from typing import TYPE_CHECKING, Any, Dict, Optional
+ from typing import TYPE_CHECKING, Any
  from warnings import warn
 
- import black
- import isort
- import toml
+ from datamodel_code_generator.util import load_toml
+
+ if TYPE_CHECKING:
+     from collections.abc import Sequence
+
+
+ @lru_cache(maxsize=1)
+ def _get_black() -> Any:
+     import black as _black  # noqa: PLC0415
+
+     return _black
+
+
+ @lru_cache(maxsize=1)
+ def _get_black_mode() -> Any:  # pragma: no cover
+     black = _get_black()
+     try:
+         import black.mode  # noqa: PLC0415
+     except ImportError:
+         return None
+     else:
+         return black.mode
+
+
+ @lru_cache(maxsize=1)
+ def _get_isort() -> Any:
+     import isort as _isort  # noqa: PLC0415
+
+     return _isort
+
+
+ class DatetimeClassType(Enum):
+     """Output datetime class type options."""
+
+     Datetime = "datetime"
+     Awaredatetime = "AwareDatetime"
+     Naivedatetime = "NaiveDatetime"
 
 
  class PythonVersion(Enum):
-     PY_36 = '3.6'
-     PY_37 = '3.7'
-     PY_38 = '3.8'
-     PY_39 = '3.9'
+     """Supported Python version targets for code generation."""
+
+     PY_39 = "3.9"
+     PY_310 = "3.10"
+     PY_311 = "3.11"
+     PY_312 = "3.12"
+     PY_313 = "3.13"
+     PY_314 = "3.14"
+
+     @cached_property
+     def _is_py_310_or_later(self) -> bool:  # pragma: no cover
+         return self.value != self.PY_39.value
+
+     @cached_property
+     def _is_py_311_or_later(self) -> bool:  # pragma: no cover
+         return self.value not in {self.PY_39.value, self.PY_310.value}
+
+     @cached_property
+     def _is_py_312_or_later(self) -> bool:  # pragma: no cover
+         return self.value not in {self.PY_39.value, self.PY_310.value, self.PY_311.value}
+
+     @cached_property
+     def _is_py_314_or_later(self) -> bool:
+         return self.value not in {
+             self.PY_39.value,
+             self.PY_310.value,
+             self.PY_311.value,
+             self.PY_312.value,
+             self.PY_313.value,
+         }
+
+     @property
+     def has_union_operator(self) -> bool:  # pragma: no cover
+         """Check if Python version supports the union operator (|)."""
+         return self._is_py_310_or_later
+
+     @property
+     def has_typed_dict_non_required(self) -> bool:
+         """Check if Python version supports TypedDict NotRequired."""
+         return self._is_py_311_or_later
+
+     @property
+     def has_kw_only_dataclass(self) -> bool:
+         """Check if Python version supports kw_only in dataclasses."""
+         return self._is_py_310_or_later
 
      @property
-     def has_literal_type(self) -> bool:
-         return self.value >= self.PY_38.value  # type: ignore
+     def has_type_alias(self) -> bool:
+         """Check if Python version supports TypeAlias."""
+         return self._is_py_310_or_later
 
+     @property
+     def has_type_statement(self) -> bool:
+         """Check if Python version supports type statements."""
+         return self._is_py_312_or_later
+
+     @property
+     def has_native_deferred_annotations(self) -> bool:
+         """Check if Python version has native deferred annotations (Python 3.14+)."""
+         return self._is_py_314_or_later
+
+     @property
+     def has_strenum(self) -> bool:
+         """Check if Python version supports StrEnum."""
+         return self._is_py_311_or_later
 
- BLACK_PYTHON_VERSION: Dict[PythonVersion, black.TargetVersion] = {
-     v: getattr(black.TargetVersion, f'PY{v.name.split("_")[-1]}')
-     for v in PythonVersion
-     if hasattr(black.TargetVersion, f'PY{v.name.split("_")[-1]}')
- }
+
+ PythonVersionMin = PythonVersion.PY_39
+
+
+ @lru_cache(maxsize=1)
+ def _get_black_python_version_map() -> dict[PythonVersion, Any]:
+     black = _get_black()
+     return {
+         v: getattr(black.TargetVersion, f"PY{v.name.split('_')[-1]}")
+         for v in PythonVersion
+         if hasattr(black.TargetVersion, f"PY{v.name.split('_')[-1]}")
+     }
 
 
  def is_supported_in_black(python_version: PythonVersion) -> bool:  # pragma: no cover
-     return python_version in BLACK_PYTHON_VERSION
+     """Check if a Python version is supported by the installed black version."""
+     return python_version in _get_black_python_version_map()
+
+
+ def black_find_project_root(sources: Sequence[Path]) -> Path:
+     """Find the project root directory for black configuration."""
+     from black import find_project_root as _find_project_root  # noqa: PLC0415
+
+     project_root = _find_project_root(tuple(str(s) for s in sources))
+     if isinstance(project_root, tuple):
+         return project_root[0]
+     return project_root  # pragma: no cover
+
+
+ class Formatter(Enum):
+     """Available code formatters for generated output."""
+
+     BLACK = "black"
+     ISORT = "isort"
+     RUFF_CHECK = "ruff-check"
+     RUFF_FORMAT = "ruff-format"
+
+
+ DEFAULT_FORMATTERS = [Formatter.BLACK, Formatter.ISORT]
 
 
  class CodeFormatter:
-     def __init__(
+     """Formats generated code using black, isort, ruff, and custom formatters."""
+
+     def __init__(  # noqa: PLR0912, PLR0913, PLR0917
          self,
          python_version: PythonVersion,
-         settings_path: Optional[Path] = None,
-         wrap_string_literal: Optional[bool] = None,
-     ):
+         settings_path: Path | None = None,
+         wrap_string_literal: bool | None = None,  # noqa: FBT001
+         skip_string_normalization: bool = True,  # noqa: FBT001, FBT002
+         known_third_party: list[str] | None = None,
+         custom_formatters: list[str] | None = None,
+         custom_formatters_kwargs: dict[str, Any] | None = None,
+         encoding: str = "utf-8",
+         formatters: list[Formatter] = DEFAULT_FORMATTERS,
+     ) -> None:
+         """Initialize code formatter with configuration for black, isort, ruff, and custom formatters."""
          if not settings_path:
-             settings_path = Path().resolve()
+             settings_path = Path.cwd()
+         elif settings_path.is_file():
+             settings_path = settings_path.parent
+         elif not settings_path.exists():
+             for parent in settings_path.parents:
+                 if parent.exists():
+                     settings_path = parent
+                     break
+             else:
+                 settings_path = Path.cwd()  # pragma: no cover
 
-         root = black.find_project_root((settings_path,))
+         root = black_find_project_root((settings_path,))
          path = root / "pyproject.toml"
          if path.is_file():
-             value = str(path)
-             pyproject_toml = toml.load(value)
+             pyproject_toml = load_toml(path)
              config = pyproject_toml.get("tool", {}).get("black", {})
          else:
              config = {}
 
-         black_kwargs: Dict[str, Any] = {}
+         black = _get_black()
+         black_mode = _get_black_mode()
+         isort = _get_isort()
+
+         black_kwargs: dict[str, Any] = {}
          if wrap_string_literal is not None:
              experimental_string_processing = wrap_string_literal
+         elif black.__version__ < "24.1.0":
+             experimental_string_processing = config.get("experimental-string-processing")
          else:
-             experimental_string_processing = config.get(
-                 'experimental-string-processing'
+             experimental_string_processing = config.get("preview", False) and (  # pragma: no cover
+                 config.get("unstable", False) or "string_processing" in config.get("enable-unstable-feature", [])
              )
 
          if experimental_string_processing is not None:  # pragma: no cover
-             if black.__version__.startswith('19.'):  # type: ignore
+             if black.__version__.startswith("19."):
                  warn(
-                     f'black doesn\'t support `experimental-string-processing` option'  # type: ignore
-                     f' for wrapping string literal in {black.__version__}'
+                     f"black doesn't support `experimental-string-processing` option"
+                     f" for wrapping string literal in {black.__version__}",
+                     stacklevel=2,
                  )
-             else:
-                 black_kwargs[
-                     'experimental_string_processing'
-                 ] = experimental_string_processing
+             elif black.__version__ < "24.1.0":
+                 black_kwargs["experimental_string_processing"] = experimental_string_processing
+             elif experimental_string_processing:
+                 black_kwargs["preview"] = True
+                 black_kwargs["unstable"] = config.get("unstable", False)
+                 black_kwargs["enabled_features"] = {black_mode.Preview.string_processing}
 
-         if TYPE_CHECKING:
-             self.back_mode: black.FileMode
-         else:
-             self.back_mode = black.FileMode(
-                 target_versions={BLACK_PYTHON_VERSION[python_version]},
-                 line_length=config.get("line-length", black.DEFAULT_LINE_LENGTH),
-                 string_normalization=not config.get("skip-string-normalization", True),
-                 **black_kwargs,
-             )
+         self.black_mode = black.FileMode(
+             target_versions={_get_black_python_version_map()[python_version]},
+             line_length=config.get("line-length", black.DEFAULT_LINE_LENGTH),
+             string_normalization=not skip_string_normalization or not config.get("skip-string-normalization", True),
+             **black_kwargs,
+         )
 
          self.settings_path: str = str(settings_path)
-         if isort.__version__.startswith('4.'):
+
+         self.isort_config_kwargs: dict[str, Any] = {}
+         if known_third_party:
+             self.isort_config_kwargs["known_third_party"] = known_third_party
+
+         if isort.__version__.startswith("4."):  # pragma: no cover
              self.isort_config = None
          else:
-             self.isort_config = isort.Config(settings_path=self.settings_path)
+             self.isort_config = isort.Config(settings_path=self.settings_path, **self.isort_config_kwargs)
+
+         self.custom_formatters_kwargs = custom_formatters_kwargs or {}
+         self.custom_formatters = self._check_custom_formatters(custom_formatters)
+         self.encoding = encoding
+         self.formatters = formatters
+
+     def _load_custom_formatter(self, custom_formatter_import: str) -> CustomCodeFormatter:
+         """Load and instantiate a custom formatter from a module path."""
+         import_ = import_module(custom_formatter_import)
+
+         if not hasattr(import_, "CodeFormatter"):
+             msg = f"Custom formatter module `{import_.__name__}` must contains object with name CodeFormatter"
+             raise NameError(msg)
+
+         formatter_class = import_.__getattribute__("CodeFormatter")  # noqa: PLC2801
+
+         if not issubclass(formatter_class, CustomCodeFormatter):
+             msg = f"The custom module {custom_formatter_import} must inherit from `datamodel-code-generator`"
+             raise TypeError(msg)
+
+         return formatter_class(formatter_kwargs=self.custom_formatters_kwargs)
+
+     def _check_custom_formatters(self, custom_formatters: list[str] | None) -> list[CustomCodeFormatter]:
+         """Validate and load all custom formatters."""
+         if custom_formatters is None:
+             return []
+
+         return [self._load_custom_formatter(custom_formatter_import) for custom_formatter_import in custom_formatters]
 
      def format_code(
          self,
          code: str,
      ) -> str:
-         code = self.apply_isort(code)
-         code = self.apply_black(code)
+         """Apply all configured formatters to the code string."""
+         if Formatter.ISORT in self.formatters:
+             code = self.apply_isort(code)
+         if Formatter.BLACK in self.formatters:
+             code = self.apply_black(code)
+
+         if Formatter.RUFF_CHECK in self.formatters:
+             code = self.apply_ruff_lint(code)
+
+         if Formatter.RUFF_FORMAT in self.formatters:
+             code = self.apply_ruff_formatter(code)
+
+         for formatter in self.custom_formatters:
+             code = formatter.apply(code)
+
          return code
 
      def apply_black(self, code: str) -> str:
+         """Format code using black."""
+         black = _get_black()
          return black.format_str(
              code,
-             mode=self.back_mode,
+             mode=self.black_mode,
+         )
+
+     def apply_ruff_lint(self, code: str) -> str:
+         """Run ruff check with auto-fix on code."""
+         result = subprocess.run(
+             ("ruff", "check", "--fix", "-"),
+             input=code.encode(self.encoding),
+             capture_output=True,
+             check=False,
+             cwd=self.settings_path,
          )
+         return result.stdout.decode(self.encoding)
 
-     if isort.__version__.startswith('4.'):
+     def apply_ruff_formatter(self, code: str) -> str:
+         """Format code using ruff format."""
+         result = subprocess.run(
+             ("ruff", "format", "-"),
+             input=code.encode(self.encoding),
+             capture_output=True,
+             check=False,
+             cwd=self.settings_path,
+         )
+         return result.stdout.decode(self.encoding)
 
-         def apply_isort(self, code: str) -> str:
+     def apply_isort(self, code: str) -> str:
+         """Sort imports using isort."""
+         isort = _get_isort()
+         if self.isort_config is None:  # pragma: no cover
              return isort.SortImports(
-                 file_contents=code, settings_path=self.settings_path
+                 file_contents=code,
+                 settings_path=self.settings_path,
+                 **self.isort_config_kwargs,
              ).output
+         return isort.code(code, config=self.isort_config)
 
-     else:
 
-         def apply_isort(self, code: str) -> str:
-             return isort.code(code, config=self.isort_config)
+ class CustomCodeFormatter:
+     """Base class for custom code formatters.
+
+     Subclasses must implement the apply() method to transform code.
+     """
+
+     def __init__(self, formatter_kwargs: dict[str, Any]) -> None:
+         """Initialize custom formatter with optional keyword arguments."""
+         self.formatter_kwargs = formatter_kwargs
+
+     def apply(self, code: str) -> str:
+         """Apply formatting to code. Must be implemented by subclasses."""
+         raise NotImplementedError
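The _load_custom_formatter contract above requires each module named in custom_formatters to expose a class called CodeFormatter that subclasses CustomCodeFormatter. A minimal sketch of such a plug-in; the module name and header text are hypothetical.

# my_formatters/license_header.py -- hypothetical module passed via custom_formatters
from __future__ import annotations

from datamodel_code_generator.format import CustomCodeFormatter


class CodeFormatter(CustomCodeFormatter):
    """Prepends a header; the class must be named CodeFormatter so the loader can find it."""

    def apply(self, code: str) -> str:
        # formatter_kwargs comes from custom_formatters_kwargs on the CodeFormatter above.
        header = self.formatter_kwargs.get("header", "# generated by datamodel-code-generator\n")
        return header + code

It would be wired in roughly as custom_formatters=["my_formatters.license_header"] with custom_formatters_kwargs={"header": "..."}, matching the loading and kwargs flow shown in __init__ above.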
datamodel_code_generator/http.py
@@ -1,16 +1,91 @@
- from typing import Optional, Sequence, Tuple
+ """HTTP utilities for fetching remote schema files.
 
- try:
-     import httpx
- except ImportError:  # pragma: no cover
-     raise Exception(
-         'Please run $pip install datamodel-code-generator[http] to resolve URL Reference'
-     )
+ Provides functions to fetch schema content from URLs and join URL references.
+ HTTP(S) URLs require the 'http' extra: `pip install 'datamodel-code-generator[http]'`.
+ file:// URLs are handled without additional dependencies.
+ """
 
+ from __future__ import annotations
 
- def get_body(url: str, headers: Optional[Sequence[Tuple[str, str]]] = None) -> str:
-     return httpx.get(url, headers=headers).text
+ from typing import TYPE_CHECKING, Any
 
+ if TYPE_CHECKING:
+     from collections.abc import Sequence
 
- def join_url(url: str, ref: str = '.') -> str:
+
+ def _get_httpx() -> Any:
+     """Lazily import httpx, raising a helpful error if not installed."""
+     try:
+         import httpx  # noqa: PLC0415
+     except ImportError as exc:  # pragma: no cover
+         msg = "Please run `$pip install 'datamodel-code-generator[http]`' to resolve HTTP(S) URL references"
+         raise Exception(msg) from exc  # noqa: TRY002
+     return httpx
+
+
+ def get_body(
+     url: str,
+     headers: Sequence[tuple[str, str]] | None = None,
+     ignore_tls: bool = False,  # noqa: FBT001, FBT002
+     query_parameters: Sequence[tuple[str, str]] | None = None,
+ ) -> str:
+     """Fetch content from a URL with optional headers and query parameters."""
+     httpx = _get_httpx()
+     return httpx.get(
+         url,
+         headers=headers,
+         verify=not ignore_tls,
+         follow_redirects=True,
+         params=query_parameters,  # pyright: ignore[reportArgumentType]
+         # TODO: Improve params type
+     ).text
+
+
+ def join_url(url: str, ref: str = ".") -> str:  # noqa: PLR0912
+     """Join a base URL with a relative reference."""
+     if url.startswith("file://"):
+         from urllib.parse import urlparse  # noqa: PLC0415
+
+         parsed = urlparse(url)
+
+         if ref.startswith("file://"):
+             return ref
+
+         ref_path, *frag = ref.split("#", 1)
+
+         # Fragment-only ref: keep the original path
+         if not ref_path:
+             joined = url.split("#", maxsplit=1)[0]
+             if frag:
+                 joined += f"#{frag[0]}"
+             return joined
+
+         if ref_path.startswith("/"):
+             joined_path = ref_path
+         else:
+             base_segments = parsed.path.lstrip("/").split("/")
+             if base_segments and not base_segments[0]:
+                 base_segments = []
+             if base_segments:
+                 base_segments = base_segments[:-1]
+
+             min_depth = 1 if parsed.netloc else 0
+             for segment in ref_path.split("/"):
+                 if segment in {"", "."}:
+                     continue
+                 if segment == "..":
+                     if len(base_segments) > min_depth:
+                         base_segments.pop()
+                     continue
+                 base_segments.append(segment)
+
+             joined_path = "/" + "/".join(base_segments)
+             if ref_path.endswith("/"):
+                 joined_path += "/"
+
+         joined = f"file://{parsed.netloc}{joined_path}"
+         if frag:
+             joined += f"#{frag[0]}"
+         return joined
+     httpx = _get_httpx()
      return str(httpx.URL(url).join(ref))
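The new join_url resolves file:// references without touching httpx. A small sketch of the expected behaviour, with outputs traced from the code shown above; the paths are illustrative.

from datamodel_code_generator.http import join_url

# Relative reference resolved against the directory of the base file:// URL.
join_url("file:///schemas/base.yaml", "common/defs.yaml#/Foo")
# -> "file:///schemas/common/defs.yaml#/Foo"

# Fragment-only reference keeps the base path and swaps the fragment.
join_url("file:///schemas/base.yaml", "#/components/schemas/Pet")
# -> "file:///schemas/base.yaml#/components/schemas/Pet"

# Anything else is delegated to httpx.URL(...).join(), which needs the 'http' extra installed.
join_url("https://example.com/specs/openapi.yaml", "pets.yaml")
# -> "https://example.com/specs/pets.yaml"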