pydantic-fixturegen 1.0.0__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pydantic-fixturegen might be problematic.
- pydantic_fixturegen/api/__init__.py +137 -0
- pydantic_fixturegen/api/_runtime.py +726 -0
- pydantic_fixturegen/api/models.py +73 -0
- pydantic_fixturegen/cli/__init__.py +32 -1
- pydantic_fixturegen/cli/check.py +230 -0
- pydantic_fixturegen/cli/diff.py +992 -0
- pydantic_fixturegen/cli/doctor.py +188 -35
- pydantic_fixturegen/cli/gen/_common.py +134 -7
- pydantic_fixturegen/cli/gen/explain.py +597 -40
- pydantic_fixturegen/cli/gen/fixtures.py +244 -112
- pydantic_fixturegen/cli/gen/json.py +229 -138
- pydantic_fixturegen/cli/gen/schema.py +170 -85
- pydantic_fixturegen/cli/init.py +333 -0
- pydantic_fixturegen/cli/schema.py +45 -0
- pydantic_fixturegen/cli/watch.py +126 -0
- pydantic_fixturegen/core/config.py +137 -3
- pydantic_fixturegen/core/config_schema.py +178 -0
- pydantic_fixturegen/core/constraint_report.py +305 -0
- pydantic_fixturegen/core/errors.py +42 -0
- pydantic_fixturegen/core/field_policies.py +100 -0
- pydantic_fixturegen/core/generate.py +241 -37
- pydantic_fixturegen/core/io_utils.py +10 -2
- pydantic_fixturegen/core/path_template.py +197 -0
- pydantic_fixturegen/core/presets.py +73 -0
- pydantic_fixturegen/core/providers/temporal.py +10 -0
- pydantic_fixturegen/core/safe_import.py +146 -12
- pydantic_fixturegen/core/seed_freeze.py +176 -0
- pydantic_fixturegen/emitters/json_out.py +65 -16
- pydantic_fixturegen/emitters/pytest_codegen.py +68 -13
- pydantic_fixturegen/emitters/schema_out.py +27 -3
- pydantic_fixturegen/logging.py +114 -0
- pydantic_fixturegen/schemas/config.schema.json +244 -0
- pydantic_fixturegen-1.1.0.dist-info/METADATA +173 -0
- pydantic_fixturegen-1.1.0.dist-info/RECORD +57 -0
- pydantic_fixturegen-1.0.0.dist-info/METADATA +0 -280
- pydantic_fixturegen-1.0.0.dist-info/RECORD +0 -41
- {pydantic_fixturegen-1.0.0.dist-info → pydantic_fixturegen-1.1.0.dist-info}/WHEEL +0 -0
- {pydantic_fixturegen-1.0.0.dist-info → pydantic_fixturegen-1.1.0.dist-info}/entry_points.txt +0 -0
- {pydantic_fixturegen-1.0.0.dist-info → pydantic_fixturegen-1.1.0.dist-info}/licenses/LICENSE +0 -0
pydantic_fixturegen/api/models.py
@@ -0,0 +1,73 @@
+"""Data models for the public Python API."""
+
+from __future__ import annotations
+
+import datetime as _dt
+from collections.abc import Mapping
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any
+
+from pydantic import BaseModel
+
+
+@dataclass(slots=True)
+class ConfigSnapshot:
+    """Relevant configuration details captured during generation."""
+
+    seed: int | str | None
+    include: tuple[str, ...]
+    exclude: tuple[str, ...]
+    time_anchor: _dt.datetime | None
+
+
+@dataclass(slots=True)
+class JsonGenerationResult:
+    """Result payload returned from :func:`pydantic_fixturegen.api.generate_json`."""
+
+    paths: tuple[Path, ...]
+    base_output: Path
+    model: type[BaseModel] | None
+    config: ConfigSnapshot
+    constraint_summary: Mapping[str, Any] | None
+    warnings: tuple[str, ...]
+    delegated: bool
+
+
+@dataclass(slots=True)
+class FixturesGenerationResult:
+    """Result payload for pytest fixture emission."""
+
+    path: Path | None
+    base_output: Path
+    models: tuple[type[BaseModel], ...]
+    config: ConfigSnapshot
+    metadata: Mapping[str, Any] | None
+    warnings: tuple[str, ...]
+    constraint_summary: Mapping[str, Any] | None
+    skipped: bool
+    delegated: bool
+    style: str
+    scope: str
+    return_type: str
+    cases: int
+
+
+@dataclass(slots=True)
+class SchemaGenerationResult:
+    """Result payload for JSON Schema emission."""
+
+    path: Path | None
+    base_output: Path
+    models: tuple[type[BaseModel], ...]
+    config: ConfigSnapshot
+    warnings: tuple[str, ...]
+    delegated: bool
+
+
+__all__ = [
+    "ConfigSnapshot",
+    "FixturesGenerationResult",
+    "JsonGenerationResult",
+    "SchemaGenerationResult",
+]
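These dataclasses are the result payloads handed back by the new public Python API (the docstring above points at pydantic_fixturegen.api.generate_json). As a minimal sketch of the shape callers receive, the snippet below constructs a JsonGenerationResult by hand; the field names and order come from the diff, while the concrete values (paths, seed, include pattern) are illustrative placeholders only.

import datetime as dt
from pathlib import Path

from pydantic_fixturegen.api.models import ConfigSnapshot, JsonGenerationResult

# Placeholder values; only the dataclass fields themselves are taken from the release.
snapshot = ConfigSnapshot(
    seed=42,
    include=("app.models.*",),
    exclude=(),
    time_anchor=dt.datetime(2024, 1, 1, tzinfo=dt.timezone.utc),
)
result = JsonGenerationResult(
    paths=(Path("build/User.json"),),
    base_output=Path("build"),
    model=None,
    config=snapshot,
    constraint_summary=None,
    warnings=(),
    delegated=False,
)
print(result.config.seed, result.paths)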
pydantic_fixturegen/cli/__init__.py
@@ -8,6 +8,9 @@ from importlib import import_module
 import typer
 from typer.main import get_command
 
+from pydantic_fixturegen.cli import schema as schema_cli
+from pydantic_fixturegen.logging import DEFAULT_VERBOSITY_INDEX, LOG_LEVEL_ORDER, get_logger
+
 
 def _load_typer(import_path: str) -> typer.Typer:
     module_name, attr = import_path.split(":", 1)
@@ -42,7 +45,18 @@ app = typer.Typer(
 
 
 @app.callback(invoke_without_command=True)
-def _root(
+def _root(
+    ctx: typer.Context,
+    verbose: int = typer.Option(0, "--verbose", "-v", count=True, help="Increase log verbosity."),
+    quiet: int = typer.Option(0, "--quiet", "-q", count=True, help="Decrease log verbosity."),
+    log_json: bool = typer.Option(False, "--log-json", help="Emit structured JSON logs."),
+) -> None:  # noqa: D401
+    logger = get_logger()
+    level_index = DEFAULT_VERBOSITY_INDEX + verbose - quiet
+    level_index = max(0, min(level_index, len(LOG_LEVEL_ORDER) - 1))
+    level_name = LOG_LEVEL_ORDER[level_index]
+    logger.configure(level=level_name, json_mode=log_json)
+
     if ctx.invoked_subcommand is None:
         _invoke("pydantic_fixturegen.cli.list:app", ctx)
         raise typer.Exit()
@@ -71,6 +85,21 @@ _proxy(
     "pydantic_fixturegen.cli.gen:app",
     "Generate artifacts for discovered models.",
 )
+_proxy(
+    "diff",
+    "pydantic_fixturegen.cli.diff:app",
+    "Regenerate artifacts in-memory and compare against existing files.",
+)
+_proxy(
+    "check",
+    "pydantic_fixturegen.cli.check:app",
+    "Validate configuration, discovery, and emitter destinations without generating artifacts.",
+)
+_proxy(
+    "init",
+    "pydantic_fixturegen.cli.init:app",
+    "Scaffold configuration and directories for new projects.",
+)
 _proxy(
     "doctor",
     "pydantic_fixturegen.cli.doctor:app",
@@ -82,4 +111,6 @@ _proxy(
     "Explain generation strategies per model field.",
 )
 
+app.add_typer(schema_cli.app, name="schema")
+
 __all__ = ["app"]
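The new root callback turns repeated -v/-q flags into an index into LOG_LEVEL_ORDER before configuring the logger. The standalone sketch below reproduces that clamping arithmetic with assumed constants; the real values live in the new pydantic_fixturegen/logging.py, which is not shown in this diff.

# Assumed values for illustration only; pydantic_fixturegen/logging.py defines the real ones.
LOG_LEVEL_ORDER = ("ERROR", "WARNING", "INFO", "DEBUG")
DEFAULT_VERBOSITY_INDEX = 1  # assumption: WARNING when neither -v nor -q is given

def resolve_level(verbose: int, quiet: int) -> str:
    # Same arithmetic as _root: each -v moves one step toward DEBUG,
    # each -q one step toward ERROR, clamped to the ends of LOG_LEVEL_ORDER.
    index = DEFAULT_VERBOSITY_INDEX + verbose - quiet
    index = max(0, min(index, len(LOG_LEVEL_ORDER) - 1))
    return LOG_LEVEL_ORDER[index]

print(resolve_level(verbose=2, quiet=0))  # -vv  -> "DEBUG"
print(resolve_level(verbose=0, quiet=3))  # -qqq -> "ERROR" (clamped at the bottom)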
pydantic_fixturegen/cli/check.py
@@ -0,0 +1,230 @@
+"""CLI command for validating configuration and discovery without generating outputs."""
+
+from __future__ import annotations
+
+import os
+from collections.abc import Iterable
+from pathlib import Path
+
+import typer
+
+from pydantic_fixturegen.core.config import ConfigError, load_config
+from pydantic_fixturegen.core.errors import DiscoveryError, PFGError
+
+from .gen._common import (  # shared helpers
+    JSON_ERRORS_OPTION,
+    DiscoveryMethod,
+    clear_module_cache,
+    discover_models,
+    load_model_class,
+    render_cli_error,
+    split_patterns,
+)
+
+PATH_ARGUMENT = typer.Argument(
+    ...,
+    help="Python module path to validate discovery against.",
+)
+
+INCLUDE_OPTION = typer.Option(
+    None,
+    "--include",
+    "-i",
+    help="Comma-separated glob pattern(s) of fully-qualified model names to include.",
+)
+
+EXCLUDE_OPTION = typer.Option(
+    None,
+    "--exclude",
+    "-e",
+    help="Comma-separated glob pattern(s) of fully-qualified model names to exclude.",
+)
+
+AST_OPTION = typer.Option(False, "--ast", help="Use AST discovery only (no imports executed).")
+
+HYBRID_OPTION = typer.Option(False, "--hybrid", help="Combine AST and safe import discovery.")
+
+TIMEOUT_OPTION = typer.Option(
+    5.0,
+    "--timeout",
+    min=0.1,
+    help="Timeout in seconds for safe import execution.",
+)
+
+MEMORY_LIMIT_OPTION = typer.Option(
+    256,
+    "--memory-limit-mb",
+    min=1,
+    help="Memory limit in megabytes for safe import subprocess.",
+)
+
+JSON_OUT_OPTION = typer.Option(
+    None,
+    "--json-out",
+    help="Validate that the provided JSON/JSONL output path is writable.",
+)
+
+FIXTURES_OUT_OPTION = typer.Option(
+    None,
+    "--fixtures-out",
+    help="Validate that the provided pytest fixtures output path is writable.",
+)
+
+SCHEMA_OUT_OPTION = typer.Option(
+    None,
+    "--schema-out",
+    help="Validate that the provided JSON Schema output path is writable.",
+)
+
+
+app = typer.Typer(invoke_without_command=True, subcommand_metavar="")
+
+
+def check(  # noqa: D401 - Typer callback
+    ctx: typer.Context,
+    path: str = PATH_ARGUMENT,
+    include: str | None = INCLUDE_OPTION,
+    exclude: str | None = EXCLUDE_OPTION,
+    ast_mode: bool = AST_OPTION,
+    hybrid_mode: bool = HYBRID_OPTION,
+    timeout: float = TIMEOUT_OPTION,
+    memory_limit_mb: int = MEMORY_LIMIT_OPTION,
+    json_out: Path | None = JSON_OUT_OPTION,
+    fixtures_out: Path | None = FIXTURES_OUT_OPTION,
+    schema_out: Path | None = SCHEMA_OUT_OPTION,
+    json_errors: bool = JSON_ERRORS_OPTION,
+) -> None:
+    _ = ctx
+    try:
+        _execute_check(
+            target=path,
+            include=include,
+            exclude=exclude,
+            ast_mode=ast_mode,
+            hybrid_mode=hybrid_mode,
+            timeout=timeout,
+            memory_limit_mb=memory_limit_mb,
+            json_out=json_out,
+            fixtures_out=fixtures_out,
+            schema_out=schema_out,
+        )
+    except PFGError as exc:
+        render_cli_error(exc, json_errors=json_errors)
+    except ConfigError as exc:
+        render_cli_error(DiscoveryError(str(exc)), json_errors=json_errors)
+
+
+app.callback(invoke_without_command=True)(check)
+
+
+def _execute_check(
+    *,
+    target: str,
+    include: str | None,
+    exclude: str | None,
+    ast_mode: bool,
+    hybrid_mode: bool,
+    timeout: float,
+    memory_limit_mb: int,
+    json_out: Path | None,
+    fixtures_out: Path | None,
+    schema_out: Path | None,
+) -> None:
+    target_path = Path(target)
+    if not target_path.exists():
+        raise DiscoveryError(f"Target path '{target}' does not exist.", details={"path": target})
+    if not target_path.is_file():
+        raise DiscoveryError("Target must be a Python module file.", details={"path": target})
+
+    load_config(root=Path.cwd())
+
+    clear_module_cache()
+
+    method = _resolve_method(ast_mode, hybrid_mode)
+    discovery = discover_models(
+        target_path,
+        include=split_patterns(include),
+        exclude=split_patterns(exclude),
+        method=method,
+        timeout=timeout,
+        memory_limit_mb=memory_limit_mb,
+    )
+
+    if discovery.errors:
+        raise DiscoveryError("; ".join(discovery.errors))
+
+    for warning in discovery.warnings:
+        if warning.strip():
+            typer.secho(f"warning: {warning.strip()}", err=True, fg=typer.colors.YELLOW)
+
+    if not discovery.models:
+        raise DiscoveryError("No models discovered.")
+
+    for model_info in discovery.models:
+        try:
+            load_model_class(model_info)
+        except RuntimeError as exc:
+            raise DiscoveryError(str(exc)) from exc
+
+    _validate_output_targets(
+        [
+            (json_out, "JSON output"),
+            (fixtures_out, "pytest fixtures output"),
+            (schema_out, "schema output"),
+        ]
+    )
+
+    typer.secho("Configuration OK", fg=typer.colors.GREEN)
+    typer.echo(f"Discovered {len(discovery.models)} model(s) for validation.")
+
+    if any(path is not None for path in (json_out, fixtures_out, schema_out)):
+        typer.echo("Emitter destinations verified.")
+
+    typer.echo("Check complete. No issues detected.")
+
+
+def _resolve_method(ast_mode: bool, hybrid_mode: bool) -> DiscoveryMethod:
+    if ast_mode and hybrid_mode:
+        raise DiscoveryError("Choose only one of --ast or --hybrid.")
+    if hybrid_mode:
+        return "hybrid"
+    if ast_mode:
+        return "ast"
+    return "import"
+
+
+def _validate_output_targets(targets: Iterable[tuple[Path | None, str]]) -> None:
+    problems: list[str] = []
+    for path, label in targets:
+        if path is None:
+            continue
+        issues = _validate_output_path(Path(path), label)
+        problems.extend(issues)
+
+    if problems:
+        message = "; ".join(problems)
+        raise DiscoveryError(message)
+
+
+def _validate_output_path(path: Path, label: str) -> list[str]:
+    issues: list[str] = []
+    if path.exists() and path.is_dir():
+        issues.append(f"{label} '{path}' points to a directory; expected a file path.")
+        return issues
+
+    parent = path.parent if path.parent != path else path
+    if not parent.exists():
+        issues.append(f"Parent directory for {label} '{parent}' does not exist.")
+        return issues
+
+    if not parent.is_dir():
+        issues.append(f"Parent path for {label} '{parent}' is not a directory.")
+        return issues
+
+    if not os.access(parent, os.W_OK):
+        issues.append(f"Parent directory for {label} '{parent}' is not writable.")
+
+    return issues
+
+
+__all__ = ["app"]
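One way to exercise the new check command without the installed console script is Typer's test runner, shown in the sketch below. The flags all come from the options defined in the hunk above; the module path "examples/models.py" is a placeholder, and whether this suits your setup depends on the package being importable in the current environment.

from typer.testing import CliRunner

from pydantic_fixturegen.cli.check import app

runner = CliRunner()

# Placeholder target module; --ast keeps discovery import-free, and --json-out
# only verifies that the destination path is writable.
result = runner.invoke(
    app,
    ["examples/models.py", "--ast", "--include", "examples.*", "--json-out", "build/out.json"],
)
print(result.exit_code)
print(result.output)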