pydantic_fixturegen-1.0.0-py3-none-any.whl

Potentially problematic release: this version of pydantic-fixturegen was flagged as potentially problematic.

Files changed (41)
  1. pydantic_fixturegen/__init__.py +7 -0
  2. pydantic_fixturegen/cli/__init__.py +85 -0
  3. pydantic_fixturegen/cli/doctor.py +235 -0
  4. pydantic_fixturegen/cli/gen/__init__.py +23 -0
  5. pydantic_fixturegen/cli/gen/_common.py +139 -0
  6. pydantic_fixturegen/cli/gen/explain.py +145 -0
  7. pydantic_fixturegen/cli/gen/fixtures.py +283 -0
  8. pydantic_fixturegen/cli/gen/json.py +262 -0
  9. pydantic_fixturegen/cli/gen/schema.py +164 -0
  10. pydantic_fixturegen/cli/list.py +164 -0
  11. pydantic_fixturegen/core/__init__.py +103 -0
  12. pydantic_fixturegen/core/ast_discover.py +169 -0
  13. pydantic_fixturegen/core/config.py +440 -0
  14. pydantic_fixturegen/core/errors.py +136 -0
  15. pydantic_fixturegen/core/generate.py +311 -0
  16. pydantic_fixturegen/core/introspect.py +141 -0
  17. pydantic_fixturegen/core/io_utils.py +77 -0
  18. pydantic_fixturegen/core/providers/__init__.py +32 -0
  19. pydantic_fixturegen/core/providers/collections.py +74 -0
  20. pydantic_fixturegen/core/providers/identifiers.py +68 -0
  21. pydantic_fixturegen/core/providers/numbers.py +133 -0
  22. pydantic_fixturegen/core/providers/registry.py +98 -0
  23. pydantic_fixturegen/core/providers/strings.py +109 -0
  24. pydantic_fixturegen/core/providers/temporal.py +42 -0
  25. pydantic_fixturegen/core/safe_import.py +403 -0
  26. pydantic_fixturegen/core/schema.py +320 -0
  27. pydantic_fixturegen/core/seed.py +154 -0
  28. pydantic_fixturegen/core/strategies.py +193 -0
  29. pydantic_fixturegen/core/version.py +52 -0
  30. pydantic_fixturegen/emitters/__init__.py +15 -0
  31. pydantic_fixturegen/emitters/json_out.py +373 -0
  32. pydantic_fixturegen/emitters/pytest_codegen.py +365 -0
  33. pydantic_fixturegen/emitters/schema_out.py +84 -0
  34. pydantic_fixturegen/plugins/builtin.py +45 -0
  35. pydantic_fixturegen/plugins/hookspecs.py +59 -0
  36. pydantic_fixturegen/plugins/loader.py +72 -0
  37. pydantic_fixturegen-1.0.0.dist-info/METADATA +280 -0
  38. pydantic_fixturegen-1.0.0.dist-info/RECORD +41 -0
  39. pydantic_fixturegen-1.0.0.dist-info/WHEEL +4 -0
  40. pydantic_fixturegen-1.0.0.dist-info/entry_points.txt +5 -0
  41. pydantic_fixturegen-1.0.0.dist-info/licenses/LICENSE +21 -0
pydantic_fixturegen/core/safe_import.py
@@ -0,0 +1,403 @@
+ """Safe import runner that executes user modules in a constrained subprocess."""
+
+ from __future__ import annotations
+
+ import json
+ import os
+ import subprocess
+ import sys
+ import textwrap
+ from collections.abc import Mapping, Sequence
+ from dataclasses import dataclass
+ from pathlib import Path
+ from typing import Any
+
+ EXIT_TIMEOUT = 40
+
+ _PROTECTED_ENV_KEYS = {
+     "PYTHONPATH",
+     "PYTHONSAFEPATH",
+     "PYTHONNOUSERSITE",
+     "NO_PROXY",
+     "no_proxy",
+     "http_proxy",
+     "https_proxy",
+     "HTTP_PROXY",
+     "HTTPS_PROXY",
+     "ALL_PROXY",
+     "all_proxy",
+     "TMPDIR",
+     "TMP",
+     "TEMP",
+     "HOME",
+ }
+
+
+ @dataclass(slots=True)
+ class SafeImportResult:
+     """Outcome from executing the safe import subprocess."""
+
+     success: bool
+     models: list[dict[str, Any]]
+     error: str | None
+     traceback: str | None
+     stderr: str
+     exit_code: int
+
+
+ def safe_import_models(
+     paths: Sequence[Path | str],
+     *,
+     cwd: Path | str | None = None,
+     timeout: float = 5.0,
+     memory_limit_mb: int = 256,
+     python_executable: str | None = None,
+     extra_env: Mapping[str, str] | None = None,
+ ) -> SafeImportResult:
+     """Import one or more modules in a sandboxed subprocess and collect Pydantic models.
+
+     Args:
+         paths: Iterable of file paths to Python modules.
+         cwd: Working directory for the subprocess (defaults to current working directory).
+         timeout: Seconds before the subprocess is terminated with exit code 40.
+         memory_limit_mb: Soft memory cap applied inside the subprocess.
+         python_executable: Python interpreter to use (defaults to `sys.executable`).
+         extra_env: Additional environment variables to expose to the subprocess.
+     """
+     if not paths:
+         return SafeImportResult(True, [], None, None, "", 0)
+
+     workdir = Path(cwd) if cwd else Path.cwd()
+     python = python_executable or sys.executable
+
+     request = {
+         "paths": [str(Path(path).resolve()) for path in paths],
+         "memory_limit_mb": memory_limit_mb,
+         "workdir": str(workdir.resolve()),
+     }
+
+     env = _build_env(workdir, extra_env)
+
+     try:
+         completed = subprocess.run(
+             [python, "-c", _RUNNER_SNIPPET],
+             input=json.dumps(request),
+             text=True,
+             capture_output=True,
+             env=env,
+             cwd=str(workdir),
+             timeout=timeout,
+         )
+     except subprocess.TimeoutExpired as exc:
+         return SafeImportResult(
+             success=False,
+             models=[],
+             error="Safe import timed out.",
+             traceback=None,
+             stderr=_safe_text(exc.stderr),
+             exit_code=EXIT_TIMEOUT,
+         )
+
+     stdout = completed.stdout.strip()
+     stderr = completed.stderr
+     exit_code = completed.returncode
+
+     if not stdout:
+         return SafeImportResult(
+             success=False,
+             models=[],
+             error="Safe import produced no output.",
+             traceback=None,
+             stderr=stderr,
+             exit_code=exit_code or 1,
+         )
+
+     try:
+         payload = json.loads(stdout)
+     except json.JSONDecodeError as exc:
+         return SafeImportResult(
+             success=False,
+             models=[],
+             error=f"Failed to decode safe-import payload: {exc}",
+             traceback=None,
+             stderr=stderr,
+             exit_code=exit_code or 1,
+         )
+
+     success = bool(payload.get("success"))
+     models = payload.get("models") or []
+     error = payload.get("error")
+     traceback_text = payload.get("traceback")
+
+     return SafeImportResult(
+         success=success,
+         models=models,
+         error=error,
+         traceback=traceback_text,
+         stderr=stderr,
+         exit_code=exit_code,
+     )
+
+
+ # Internal helpers -----------------------------------------------------------------
+
+
+ def _safe_text(value: object) -> str:
+     return value.decode("utf-8", "replace") if isinstance(value, bytes) else str(value or "")
+
+
+ def _build_env(workdir: Path, extra_env: Mapping[str, str] | None) -> dict[str, str]:
+     base_env: dict[str, str] = {
+         "PYTHONSAFEPATH": "1",
+         "PYTHONPATH": str(workdir),
+         "NO_PROXY": "*",
+         "no_proxy": "*",
+         "http_proxy": "",
+         "https_proxy": "",
+         "HTTP_PROXY": "",
+         "HTTPS_PROXY": "",
+         "ALL_PROXY": "",
+         "all_proxy": "",
+         "PYTHONNOUSERSITE": "1",
+         "TMPDIR": str(workdir),
+         "TMP": str(workdir),
+         "TEMP": str(workdir),
+         "HOME": str(workdir),
+     }
+
+     allowed_passthrough = ["PATH", "SYSTEMROOT", "COMSPEC"]
+     for key in allowed_passthrough:
+         if key in os.environ:
+             base_env[key] = os.environ[key]
+
+     if extra_env:
+         for key, value in extra_env.items():
+             if key in _PROTECTED_ENV_KEYS:
+                 continue
+             base_env[key] = value
+
+     return base_env
+
+
+ _RUNNER_SNIPPET = textwrap.dedent(
+     """
+     import builtins
+     import json
+     import os
+     import sys
+     import traceback
+     from importlib import util as importlib_util
+     from pathlib import Path
+
+     def _apply_resource_limits(limit_mb: int) -> None:
+         try:
+             import resource
+         except ImportError:  # pragma: no cover
+             return
+
+         bytes_limit = max(1, limit_mb) * 1024 * 1024
+         for res_name in ("RLIMIT_AS", "RLIMIT_DATA"):
+             res = getattr(resource, res_name, None)
+             if res is None:
+                 continue
+             soft, hard = resource.getrlimit(res)
+             hard_limit = bytes_limit
+             if hard not in (resource.RLIM_INFINITY, None) and hard < bytes_limit:
+                 hard_limit = hard
+
+             if soft in (resource.RLIM_INFINITY, None) or soft > hard_limit:
+                 soft_limit = hard_limit
+             else:
+                 soft_limit = soft
+
+             try:
+                 resource.setrlimit(res, (soft_limit, hard_limit))
+             except (ValueError, OSError):  # pragma: no cover
+                 continue
+
+     def _block_network() -> None:
+         import socket
+
+         class _ProtectedSocket(socket.socket):
+             def __init__(self, *args, **kwargs):  # type: ignore[no-untyped-def]
+                 raise RuntimeError("network access disabled in safe-import")
+
+             def connect(self, *args, **kwargs):  # type: ignore[override]
+                 raise RuntimeError("network access disabled in safe-import")
+
+             def connect_ex(self, *args, **kwargs):  # type: ignore[override]
+                 raise RuntimeError("network access disabled in safe-import")
+
+         def _blocked(*_args, **_kwargs):
+             raise RuntimeError("network access disabled in safe-import")
+
+         socket.socket = _ProtectedSocket  # type: ignore[assignment]
+         socket.create_connection = _blocked  # type: ignore[assignment]
+         socket.socketpair = _blocked  # type: ignore[assignment]
+         socket.create_server = _blocked  # type: ignore[assignment]
+         socket.getaddrinfo = _blocked  # type: ignore[assignment]
+         socket.gethostbyname = _blocked  # type: ignore[assignment]
+         socket.gethostbyaddr = _blocked  # type: ignore[assignment]
+
+
+     def _restrict_filesystem(root: Path) -> None:
+         import io
+
+         allowed_root = root.resolve()
+
+         def _normalize_candidate(candidate: object) -> Path | None:
+             if isinstance(candidate, int):
+                 return None
+             if isinstance(candidate, (str, bytes, os.PathLike)):
+                 path = Path(candidate)
+             else:
+                 return None
+             if not path.is_absolute():
+                 path = (Path.cwd() / path).resolve()
+             else:
+                 path = path.resolve()
+             return path
+
+         def _ensure_allowed(candidate: object) -> None:
+             normalized = _normalize_candidate(candidate)
+             if normalized is None:
+                 return
+             try:
+                 normalized.relative_to(allowed_root)
+             except ValueError:
+                 raise RuntimeError("filesystem writes outside the sandbox are not permitted")
+
+         def _needs_write(mode: str) -> bool:
+             return any(flag in mode for flag in ("w", "a", "x", "+"))
+
+         original_open = builtins.open
+
+         def _guarded_open(file, mode="r", *args, **kwargs):  # type: ignore[no-untyped-def]
+             if _needs_write(mode):
+                 _ensure_allowed(file)
+             return original_open(file, mode, *args, **kwargs)
+
+         builtins.open = _guarded_open  # type: ignore[assignment]
+
+         original_io_open = io.open
+
+         def _guarded_io_open(file, mode="r", *args, **kwargs):  # type: ignore[no-untyped-def]
+             if _needs_write(mode):
+                 _ensure_allowed(file)
+             return original_io_open(file, mode, *args, **kwargs)
+
+         io.open = _guarded_io_open  # type: ignore[assignment]
+
+         original_os_open = os.open
+
+         def _guarded_os_open(path, flags, mode=0o777):  # type: ignore[no-untyped-def]
+             needs_write = bool(
+                 flags
+                 & (
+                     os.O_WRONLY
+                     | os.O_RDWR
+                     | os.O_APPEND
+                     | os.O_CREAT
+                     | os.O_TRUNC
+                 )
+             )
+             if needs_write:
+                 _ensure_allowed(path)
+             return original_os_open(path, flags, mode)
+
+         os.open = _guarded_os_open  # type: ignore[assignment]
+
+         original_path_write_text = Path.write_text
+
+         def _guarded_write_text(self, *args, **kwargs):  # type: ignore[no-untyped-def]
+             _ensure_allowed(self)
+             return original_path_write_text(self, *args, **kwargs)
+
+         Path.write_text = _guarded_write_text  # type: ignore[assignment]
+
+         original_path_write_bytes = Path.write_bytes
+
+         def _guarded_write_bytes(self, *args, **kwargs):  # type: ignore[no-untyped-def]
+             _ensure_allowed(self)
+             return original_path_write_bytes(self, *args, **kwargs)
+
+         Path.write_bytes = _guarded_write_bytes  # type: ignore[assignment]
+
+         original_path_touch = Path.touch
+
+         def _guarded_touch(self, *args, **kwargs):  # type: ignore[no-untyped-def]
+             _ensure_allowed(self)
+             return original_path_touch(self, *args, **kwargs)
+
+         Path.touch = _guarded_touch  # type: ignore[assignment]
+
+     def _derive_module_name(module_path: Path, index: int) -> str:
+         stem = module_path.stem or "module"
+         return stem if index == 0 else f"{stem}_{index}"
+
+     def _load_module(module_path: Path, index: int):
+         module_name = _derive_module_name(module_path, index)
+         spec = importlib_util.spec_from_file_location(module_name, module_path)
+         if spec is None or spec.loader is None:
+             raise ImportError(f"Could not load module from {module_path}")
+         module = importlib_util.module_from_spec(spec)
+         sys.modules[spec.name] = module
+         spec.loader.exec_module(module)
+         return module
+
+     def _collect_models(module, module_path: Path):
+         models = []
+         try:
+             from pydantic import BaseModel
+         except Exception:  # pragma: no cover - in absence of pydantic
+             BaseModel = None
+
+         for attr_name, attr_value in vars(module).items():
+             if getattr(attr_value, "__module__", None) != module.__name__:
+                 continue
+             if BaseModel is None:
+                 continue
+             if isinstance(attr_value, type) and issubclass(attr_value, BaseModel):
+                 models.append(
+                     {
+                         "module": module.__name__,
+                         "name": attr_value.__name__,
+                         "qualname": f"{module.__name__}.{attr_value.__name__}",
+                         "path": str(module_path),
+                     }
+                 )
+         return models
+
+     def main() -> None:
+         request = json.loads(sys.stdin.read())
+
+         workdir = Path(request.get("workdir") or os.getcwd())
+         os.chdir(workdir)
+
+         _apply_resource_limits(int(request.get("memory_limit_mb", 256)))
+         _block_network()
+         _restrict_filesystem(workdir)
+
+         paths = [Path(path) for path in request.get("paths", [])]
+
+         collected = []
+         for idx, path in enumerate(paths):
+             module_path = Path(path)
+             module = _load_module(module_path, idx)
+             collected.extend(_collect_models(module, module_path))
+
+         payload = {"success": True, "models": collected}
+         json.dump(payload, sys.stdout)
+
+     if __name__ == "__main__":
+         try:
+             main()
+         except Exception as exc:  # pragma: no cover
+             payload = {
+                 "success": False,
+                 "error": str(exc),
+                 "traceback": traceback.format_exc(),
+             }
+             json.dump(payload, sys.stdout)
+     """
+ )
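
The `safe_import_models` docstring above describes the module's public entry point; the sketch below shows how a caller might drive it. This is a minimal usage sketch, not part of the package sources: the `models.py` path is a hypothetical example, and only the names visible in this diff (`safe_import_models`, `EXIT_TIMEOUT`, the `SafeImportResult` fields, and the per-model dict keys) are assumed to exist.

# Usage sketch (hypothetical caller; models.py is an assumed example module).
from pathlib import Path

from pydantic_fixturegen.core.safe_import import EXIT_TIMEOUT, safe_import_models

result = safe_import_models(
    [Path("models.py")],      # modules to import inside the sandboxed subprocess
    timeout=5.0,              # seconds before the child is killed (exit code 40)
    memory_limit_mb=256,      # soft RLIMIT applied inside the child process
)

if result.success:
    for model in result.models:
        # Each entry carries "module", "name", "qualname" and "path".
        print(model["qualname"], "discovered in", model["path"])
elif result.exit_code == EXIT_TIMEOUT:
    print("safe import timed out:", result.error)
else:
    print("safe import failed:", result.error)
    print(result.traceback or result.stderr)

Note that `_apply_resource_limits` returns early when the `resource` module is unavailable, so the memory cap is effectively POSIX-only; the network and filesystem guards still apply everywhere.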
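
For reference, the parent and the sandboxed child exchange a single JSON round trip over stdin/stdout. The key names below are taken from the `request` dict built in `safe_import_models` and from the payloads emitted by the runner's `main()` and its exception handler; the concrete values ("/work", "User") are illustrative only.

# Illustrative protocol messages (only the keys come from the code above; values are made up).
request = {
    "paths": ["/work/models.py"],   # resolved to absolute paths by the parent
    "memory_limit_mb": 256,
    "workdir": "/work",
}

success_payload = {
    "success": True,
    "models": [
        {
            "module": "models",
            "name": "User",
            "qualname": "models.User",
            "path": "/work/models.py",
        }
    ],
}

failure_payload = {
    "success": False,
    "error": "Could not load module from /work/models.py",
    "traceback": "Traceback (most recent call last): ...",
}

Anything the imported modules print to stderr is surfaced unparsed on `SafeImportResult.stderr`, and a child that exits without printing JSON is reported as "Safe import produced no output."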