pytest-jsonschema-snapshot 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pytest_jsonschema_snapshot/__init__.py +12 -0
- pytest_jsonschema_snapshot/core.py +260 -0
- pytest_jsonschema_snapshot/plugin.py +180 -0
- pytest_jsonschema_snapshot/py.typed +0 -0
- pytest_jsonschema_snapshot/stats.py +185 -0
- pytest_jsonschema_snapshot/tools/__init__.py +4 -0
- pytest_jsonschema_snapshot/tools/genson_addon/__init__.py +3 -0
- pytest_jsonschema_snapshot/tools/genson_addon/format_detector.py +52 -0
- pytest_jsonschema_snapshot/tools/genson_addon/to_schema_converter.py +119 -0
- pytest_jsonschema_snapshot/tools/name_maker.py +153 -0
- pytest_jsonschema_snapshot-0.2.0.dist-info/METADATA +195 -0
- pytest_jsonschema_snapshot-0.2.0.dist-info/RECORD +15 -0
- pytest_jsonschema_snapshot-0.2.0.dist-info/WHEEL +4 -0
- pytest_jsonschema_snapshot-0.2.0.dist-info/entry_points.txt +2 -0
- pytest_jsonschema_snapshot-0.2.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,12 @@
"""
pytest-typed-schema-shot
========================

A pytest plugin that automatically generates JSON Schemas from sample data
and validates data against the stored schemas.
"""

# NOTE(review): the docstring heading reads "pytest-typed-schema-shot" while
# the distribution metadata names the package "pytest-jsonschema-snapshot" —
# the heading may be stale after a rename; confirm against packaging config.

from .core import SchemaShot

__version__ = "0.2.0"
__all__ = ["SchemaShot"]
@@ -0,0 +1,260 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Core logic of the plugin.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import TYPE_CHECKING, Any, Callable, Optional, Set
|
|
9
|
+
|
|
10
|
+
import pathvalidate
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
from jsonschema_diff import JsonSchemaDiff
|
|
14
|
+
|
|
15
|
+
import pytest
|
|
16
|
+
from jsonschema import FormatChecker, ValidationError, validate
|
|
17
|
+
|
|
18
|
+
from .stats import GLOBAL_STATS
|
|
19
|
+
from .tools import JsonToSchemaConverter, NameMaker
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class SchemaShot:
    """Creates, validates and maintains JSON Schema snapshots for test data."""

    def __init__(
        self,
        root_dir: Path,
        differ: "JsonSchemaDiff",
        callable_regex: str = "{class_method=.}",
        update_mode: bool = False,
        save_original: bool = False,
        debug_mode: bool = False,
        snapshot_dir_name: str = "__snapshots__",
    ):
        """
        Initializes SchemaShot.

        Args:
            root_dir: Project root directory
            differ: Renderer used to display differences between schemas
            callable_regex: Template for converting a callable to a name part
            update_mode: Update mode (--schema-update)
            save_original: Also store the raw JSON next to the schema
            debug_mode: When True, internal frames are kept in pytest tracebacks
            snapshot_dir_name: Name of the directory for snapshots
        """
        self.root_dir: Path = root_dir
        self.differ: "JsonSchemaDiff" = differ
        self.callable_regex: str = callable_regex
        self.update_mode: bool = update_mode
        self.save_original: bool = save_original
        self.debug_mode: bool = debug_mode
        self.snapshot_dir: Path = root_dir / snapshot_dir_name
        self.used_schemas: Set[str] = set()

        self.logger = logging.getLogger(__name__)
        # Send output to stderr, but attach the handler only once: SchemaShot
        # may be instantiated several times per session (one per directory),
        # and stacking handlers would duplicate every log line.
        if not self.logger.handlers:
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter("%(levelname)s %(name)s: %(message)s"))
            self.logger.addHandler(handler)
        # Raise the level so INFO/DEBUG records pass through the handler
        self.logger.setLevel(logging.INFO)

        # Create the snapshot directory if it does not exist yet.
        # exist_ok avoids a race between an exists() check and mkdir().
        self.snapshot_dir.mkdir(parents=True, exist_ok=True)

    def _process_name(self, name: str | int | Callable | list[str | int | Callable]) -> str:
        """
        1. Converts callables (and lists of parts) to a dotted string
        2. Checks that the result is a valid file name

        Returns:
            str
        Raises:
            ValueError
        """

        __tracebackhide__ = not self.debug_mode  # hide from the pytest stack

        def process_name_part(part: str | int | Callable) -> str:
            if callable(part):
                return NameMaker.format(part, self.callable_regex)
            else:
                return str(part)

        if isinstance(name, (list, tuple)):
            name = ".".join([process_name_part(part) for part in name])
        else:
            name = process_name_part(name)

        if not isinstance(name, str) or not name:
            raise ValueError("Schema name must be a non-empty string")

        try:
            # "auto" picks the validation rules for the current OS
            pathvalidate.validate_filename(
                name, platform="auto"
            )  # allow_reserved=False by default
        except pathvalidate.ValidationError as e:
            # BUG FIX: the original caught jsonschema's ValidationError here,
            # which pathvalidate never raises, so invalid names escaped as raw
            # pathvalidate errors instead of the documented ValueError.
            raise ValueError(f"Invalid schema name: {e}") from None

        return name

    def _save_process_original(self, real_name: str, status: Optional[bool], data: dict) -> None:
        """
        Creates, updates or removes the raw-JSON companion file (`<name>.json`)
        depending on --save-original and the match status. Only called in
        update mode.

        Args:
            real_name: Already-validated snapshot name
            status: Result of the schema match (see assert_json_match)
            data: Original JSON payload
        """
        json_name = f"{real_name}.json"
        json_path = self.snapshot_dir / json_name

        if self.save_original:
            available_to_create = not json_path.exists() or status is None
            available_to_update = status is True

            if available_to_create or available_to_update:
                with open(json_path, "w", encoding="utf-8") as f:
                    json.dump(data, f, indent=2, ensure_ascii=False)

                if available_to_create:
                    GLOBAL_STATS.add_created(json_name)
                elif available_to_update:
                    GLOBAL_STATS.add_updated(json_name)
                else:
                    # defensive: unreachable given the guard above
                    raise ValueError(f"Unexpected status: {status}")
        elif json_path.exists():
            # --save-original is off: remove the stale companion file
            json_path.unlink()
            GLOBAL_STATS.add_deleted(json_name)

    def assert_json_match(
        self,
        data: dict,
        name: str | int | Callable | list[str | int | Callable],
    ) -> Optional[bool]:
        """
        Asserts for JSON, converts it to schema and then compares.

        Returns:
            True – the schema has been updated,
            False – the schema has not changed,
            None – a new schema has been created.
        """

        real_name = self._process_name(name)

        builder = JsonToSchemaConverter()
        builder.add_object(data)
        current_schema = builder.to_schema()

        real_name, status = self._base_match(data, current_schema, real_name)

        if self.update_mode:
            self._save_process_original(real_name=real_name, status=status, data=data)

        return status

    def assert_schema_match(
        self,
        schema: dict[str, Any],
        name: str | int | Callable | list[str | int | Callable],
        *,
        data: Optional[dict] = None,
    ) -> Optional[bool]:
        """
        Accepts a JSON-schema directly and compares it immediately.

        Returns:
            True – the schema has been updated,
            False – the schema has not changed,
            None – a new schema has been created.
        """

        real_name = self._process_name(name)

        real_name, status = self._base_match(data, schema, real_name)

        if self.update_mode and data is not None:
            self._save_process_original(real_name=real_name, status=status, data=data)

        return status

    def _base_match(
        self,
        data: Optional[dict],
        current_schema: dict,
        name: str,
    ) -> tuple[str, Optional[bool]]:
        """
        Checks if data matches the JSON schema, creates/updates it if needed,
        and writes statistics to GLOBAL_STATS.

        Returns:
            True – the schema has been updated,
            False – the schema has not changed,
            None – a new schema has been created.
        """
        __tracebackhide__ = not self.debug_mode  # hide from the pytest stack

        # Validate/normalize the name (idempotent for already-processed names)
        name = self._process_name(name)

        schema_path = self.snapshot_dir / f"{name}.schema.json"
        self.used_schemas.add(schema_path.name)

        # --- state BEFORE the check ---
        schema_exists_before = schema_path.exists()

        # --- the schema does not exist yet ---
        if not schema_exists_before:
            if not self.update_mode:
                # BUG FIX: the original message lacked a space between the two
                # concatenated sentences ("not found.Run the test ...").
                pytest.fail(
                    f"Schema `{name}` not found. "
                    "Run the test with the --schema-update option to create it."
                )

            with open(schema_path, "w", encoding="utf-8") as f:
                json.dump(current_schema, f, indent=2, ensure_ascii=False)

            self.logger.info(f"New schema `{name}` has been created.")
            GLOBAL_STATS.add_created(schema_path.name)  # "created" statistics
            return name, None

        with open(schema_path, "r", encoding="utf-8") as f:
            existing_schema = json.load(f)

        # --- the schema already exists: compare and validate ---------------
        schema_updated = False

        if existing_schema != current_schema:  # there are differences
            differences = self.differ.compare(dict(existing_schema), current_schema).render()

            if self.update_mode:
                GLOBAL_STATS.add_updated(schema_path.name, differences)

                # overwrite the stored schema
                with open(schema_path, "w", encoding="utf-8") as f:
                    json.dump(current_schema, f, indent=2, ensure_ascii=False)
                self.logger.warning(f"Schema `{name}` updated.\n\n{differences}")
                schema_updated = True
            elif data is not None:
                GLOBAL_STATS.add_uncommitted(schema_path.name, differences)

                # only validate against the old (stored) schema
                try:
                    validate(
                        instance=data,
                        schema=existing_schema,
                        format_checker=FormatChecker(),
                    )
                except ValidationError as e:
                    pytest.fail(
                        f"\n\n{differences}\n\nValidation error in `{name}`: {e.message}"
                    )
        elif data is not None:
            # schemas are identical – validate anyway to catch format errors
            try:
                validate(
                    instance=data,
                    schema=existing_schema,
                    format_checker=FormatChecker(),
                )
            except ValidationError as e:
                differences = self.differ.compare(
                    dict(existing_schema), current_schema
                ).render()
                pytest.fail(f"\n\n{differences}\n\nValidation error in `{name}`: {e.message}")

        return name, schema_updated
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
from typing import Dict, Generator, Optional
|
|
3
|
+
|
|
4
|
+
import pytest
|
|
5
|
+
from jsonschema_diff import ConfigMaker, JsonSchemaDiff
|
|
6
|
+
from jsonschema_diff.color import HighlighterPipeline
|
|
7
|
+
from jsonschema_diff.color.stages import (
|
|
8
|
+
MonoLinesHighlighter,
|
|
9
|
+
PathHighlighter,
|
|
10
|
+
ReplaceGenericHighlighter,
|
|
11
|
+
)
|
|
12
|
+
|
|
13
|
+
from .core import SchemaShot
|
|
14
|
+
from .stats import GLOBAL_STATS, SchemaStats
|
|
15
|
+
|
|
16
|
+
# Global storage of SchemaShot instances for different directories.
# Keyed by the directory of the test file and kept for the whole session so
# that `used_schemas` accumulates before the terminal-summary cleanup runs.
_schema_managers: Dict[Path, SchemaShot] = {}
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def pytest_addoption(parser: pytest.Parser) -> None:
    """Register the plugin's command-line flags and ini settings."""
    boolean_flags = (
        ("--schema-update", "Update or create JSON Schema files based on current data"),
        ("--save-original", "Save original JSON alongside schema (same name, but without `.schema` prefix)"),
        ("--jsss-debug", "Show internal exception stack (stops hiding them)"),
    )
    for flag, help_text in boolean_flags:
        parser.addoption(flag, action="store_true", help=help_text)

    ini_settings = (
        ("jsss_dir", "__snapshots__", "Directory for storing schemas (default: __snapshots__)"),
        ("jsss_callable_regex", "{class_method=.}", "Regex for saving callable part of path"),
    )
    for ini_name, ini_default, help_text in ini_settings:
        parser.addini(ini_name, default=ini_default, help=help_text)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@pytest.fixture(scope="function")
def schemashot(request: pytest.FixtureRequest) -> Generator[SchemaShot, None, None]:
    """
    Fixture providing a SchemaShot instance and gathering used schemas.

    Instances are cached per test-file directory in ``_schema_managers`` so
    that ``used_schemas`` accumulates across every test in that directory.
    """
    # Path of the test file (``path`` on modern pytest, ``fspath`` on older)
    test_path = Path(request.node.path if hasattr(request.node, "path") else request.node.fspath)
    root_dir = test_path.parent

    if root_dir not in _schema_managers:
        update_mode = bool(request.config.getoption("--schema-update"))
        save_original = bool(request.config.getoption("--save-original"))
        debug_mode = bool(request.config.getoption("--jsss-debug"))

        # Configurable schema directory and callable-name template
        schema_dir_name = str(request.config.getini("jsss_dir"))
        callable_regex = str(request.config.getini("jsss_callable_regex"))

        # The differ is only needed when a brand-new manager is created, so
        # build it here instead of on every fixture invocation (the original
        # constructed it unconditionally and usually threw it away).
        differ = JsonSchemaDiff(
            ConfigMaker.make(),
            HighlighterPipeline(
                [MonoLinesHighlighter(), PathHighlighter(), ReplaceGenericHighlighter()]
            ),
        )
        _schema_managers[root_dir] = SchemaShot(
            root_dir,
            differ,
            callable_regex,
            update_mode,
            save_original,
            debug_mode,
            schema_dir_name,
        )

    # Hand the cached instance to the test
    yield _schema_managers[root_dir]
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
@pytest.hookimpl(trylast=True)
def pytest_unconfigure(config: pytest.Config) -> None:
    """
    Hook that runs after all tests have finished; resets the plugin state.

    Args:
        config: The pytest config object (unused; required by the hook spec).
    """
    # Clear the per-directory manager cache
    _schema_managers.clear()

    # BUG FIX: the original did ``global GLOBAL_STATS; GLOBAL_STATS =
    # SchemaStats()``, which only rebinds *this* module's name. Other modules
    # (e.g. ``core``) hold a direct reference via ``from .stats import
    # GLOBAL_STATS`` and would keep writing to the stale object. Resetting
    # the shared instance in place is visible to every importer.
    GLOBAL_STATS.created.clear()
    GLOBAL_STATS.updated.clear()
    GLOBAL_STATS.updated_diffs.clear()
    GLOBAL_STATS.uncommitted.clear()
    GLOBAL_STATS.uncommitted_diffs.clear()
    GLOBAL_STATS.deleted.clear()
    GLOBAL_STATS.unused.clear()
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
@pytest.hookimpl(trylast=True)
def pytest_terminal_summary(terminalreporter: pytest.TerminalReporter, exitstatus: int) -> None:
    """
    Adds a summary about schemas to the final pytest report in the terminal.

    Args:
        terminalreporter: pytest's terminal reporter.
        exitstatus: Session exit status (unused; required by the hook spec).
    """
    # Computed once (the original evaluated this twice).
    update_mode = bool(terminalreporter.config.getoption("--schema-update"))

    # Delete/flag unused schemas for every manager *before* printing the
    # summary so the deletions show up in it. Iterating an empty dict is a
    # no-op, so no explicit guard is needed.
    for manager in _schema_managers.values():
        cleanup_unused_schemas(manager, update_mode, GLOBAL_STATS)

    GLOBAL_STATS.print_summary(terminalreporter, update_mode)
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def cleanup_unused_schemas(
    manager: SchemaShot, update_mode: bool, stats: Optional[SchemaStats] = None
) -> None:
    """
    Deletes unused schemas in update mode and collects statistics.
    Additionally, deletes the pair file `<name>.json` if it exists.

    Args:
        manager: SchemaShot instance
        update_mode: Update mode
        stats: Optional object for collecting statistics
    """
    # Nothing to do when the snapshot directory is absent
    if not manager.snapshot_dir.exists():
        return

    for schema_file in list(manager.snapshot_dir.glob("*.schema.json")):
        # Schemas touched by this run are kept
        if schema_file.name in manager.used_schemas:
            continue

        # Without --schema-update we only report, never delete
        if not update_mode:
            if stats:
                stats.add_unused(schema_file.name)
            continue

        try:
            # Remove the schema itself
            schema_file.unlink()
            if stats:
                stats.add_deleted(schema_file.name)

            # Try the paired original: "<name>.schema.json" -> "<name>.json"
            base_name = schema_file.name[: -len(".schema.json")]
            paired_json = schema_file.with_name(f"{base_name}.json")
            if paired_json.exists():
                try:
                    paired_json.unlink()
                    if stats:
                        stats.add_deleted(paired_json.name)
                except OSError as e:
                    manager.logger.warning(
                        f"Failed to delete paired JSON for {schema_file.name}: {e}"
                    )
                except Exception as e:
                    manager.logger.error(
                        f"Unexpected error deleting paired JSON for {schema_file.name}: {e}"
                    )
        except OSError as e:
            # Deletion failures are logged but never abort the cleanup
            manager.logger.warning(
                f"Failed to delete unused schema {schema_file.name}: {e}"
            )
        except Exception as e:
            # Unexpected failures are logged too
            manager.logger.error(
                f"Unexpected error deleting schema {schema_file.name}: {e}"
            )
|
|
File without changes
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Module for collecting and displaying statistics about schemas.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Dict, Generator, List, Optional
|
|
6
|
+
|
|
7
|
+
import pytest
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class SchemaStats:
    """Collects and displays statistics about schema snapshot files."""

    def __init__(self) -> None:
        # File names in the order events were recorded
        self.created: List[str] = []
        self.updated: List[str] = []
        self.updated_diffs: Dict[str, str] = {}  # schema_name -> diff
        self.uncommitted: List[str] = []  # changed but not written (no --schema-update)
        self.uncommitted_diffs: Dict[str, str] = {}  # schema_name -> diff
        self.deleted: List[str] = []
        self.unused: List[str] = []

    def add_created(self, schema_name: str) -> None:
        """Records a newly created file."""
        self.created.append(schema_name)

    def add_updated(self, schema_name: str, diff: Optional[str] = None) -> None:
        """Records an updated file; keeps the diff when a non-blank one is given."""
        # Simplified from the original, which appended in both branches of an
        # if/else whose only real difference was the diff bookkeeping.
        self.updated.append(schema_name)
        if diff and diff.strip():
            self.updated_diffs[schema_name] = diff

    def add_uncommitted(self, schema_name: str, diff: Optional[str] = None) -> None:
        """Records a schema with uncommitted changes (only when a real diff exists)."""
        if diff and diff.strip():
            self.uncommitted.append(schema_name)
            self.uncommitted_diffs[schema_name] = diff

    def add_deleted(self, schema_name: str) -> None:
        """Records a deleted file."""
        self.deleted.append(schema_name)

    def add_unused(self, schema_name: str) -> None:
        """Records a schema no test referenced."""
        self.unused.append(schema_name)

    def has_changes(self) -> bool:
        """Returns True if any schema was created, updated or deleted."""
        return bool(self.created or self.updated or self.deleted)

    def has_any_info(self) -> bool:
        """Returns True if there is anything at all to report."""
        return bool(self.created or self.updated or self.deleted or self.unused or self.uncommitted)

    def __str__(self) -> str:
        # NOTE(review): `uncommitted` is intentionally not listed here; only
        # print_summary shows it. Preserved from the original behavior.
        parts = []
        if self.created:
            parts.append(
                f"Created schemas ({len(self.created)}): "
                + ", ".join(f"`{s}`" for s in self.created)
            )
        if self.updated:
            parts.append(
                f"Updated schemas ({len(self.updated)}): "
                + ", ".join(f"`{s}`" for s in self.updated)
            )
        if self.deleted:
            parts.append(
                f"Deleted schemas ({len(self.deleted)}): "
                + ", ".join(f"`{s}`" for s in self.deleted)
            )
        if self.unused:
            parts.append(
                f"Unused schemas ({len(self.unused)}): " + ", ".join(f"`{s}`" for s in self.unused)
            )

        return "\n".join(parts)

    def print_summary(self, terminalreporter: "pytest.TerminalReporter", update_mode: bool) -> None:
        """
        Prints schema summary to pytest terminal output.
        Pairs of "<name>.schema.json" + "<name>.json" are merged into one line:
        "<name>.schema.json + original" (if original is present).
        """

        def _iter_merged(names: List[str]) -> Generator[tuple[str, Optional[str]], None, None]:
            """
            Iterates over (display, schema_key):
            - display: string to display (may have " + original")
            - schema_key: file name of the schema (<name>.schema.json) to find diffs,
              or None if it's not a schema.
            Preserves the original list order: merging happens at .schema.json
            position; single .json outputs are left as is.
            """
            names = list(names)  # order matters
            schema_sfx = ".schema.json"
            json_sfx = ".json"

            # bases for which a schema / an original exists
            bases_with_schema = {n[: -len(schema_sfx)] for n in names if n.endswith(schema_sfx)}
            bases_with_original = {
                n[: -len(json_sfx)]
                for n in names
                if n.endswith(json_sfx) and not n.endswith(schema_sfx)
            }

            for n in names:
                if n.endswith(schema_sfx):
                    base = n[: -len(schema_sfx)]
                    if base in bases_with_original:
                        yield f"{n} + original", n  # display, schema_key
                    else:
                        yield n, n
                elif n.endswith(json_sfx) and not n.endswith(schema_sfx):
                    base = n[: -len(json_sfx)]
                    # a paired schema exists — do not print the .json separately
                    if base in bases_with_schema:
                        continue
                    yield n, None
                else:
                    # defensive: any other name passes through unchanged
                    yield n, n

        if not self.has_any_info():
            return

        terminalreporter.write_sep("=", "Schema Summary")

        # Created
        if self.created:
            terminalreporter.write_line(f"Created schemas ({len(self.created)}):", green=True)
            for display, _key in _iter_merged(self.created):
                terminalreporter.write_line(f"  - {display}", green=True)

        # Updated
        if self.updated:
            terminalreporter.write_line(f"Updated schemas ({len(self.updated)}):", yellow=True)
            for display, key in _iter_merged(self.updated):
                terminalreporter.write_line(f"  - {display}", yellow=True)
                # Show the diff if one is stored under the schema key (.schema.json)
                if key and key in self.updated_diffs:
                    terminalreporter.write_line("    Changes:", yellow=True)
                    for line in self.updated_diffs[key].split("\n"):
                        if line.strip():
                            terminalreporter.write_line(f"      {line}")
                    terminalreporter.write_line("")  # spacing
                elif key:
                    terminalreporter.write_line(
                        "    (Schema unchanged - no differences detected)", cyan=True
                    )

        # Uncommitted
        if self.uncommitted:
            terminalreporter.write_line(
                f"Uncommitted minor updates ({len(self.uncommitted)}):", bold=True
            )
            for display, key in _iter_merged(self.uncommitted):
                terminalreporter.write_line(f"  - {display}", cyan=True)
                if key and key in self.uncommitted_diffs:
                    terminalreporter.write_line("    Detected changes:", cyan=True)
                    for line in self.uncommitted_diffs[key].split("\n"):
                        if line.strip():
                            terminalreporter.write_line(f"      {line}")
                    terminalreporter.write_line("")  # spacing
            terminalreporter.write_line("Use --schema-update to commit these changes", cyan=True)

        # Deleted
        if self.deleted:
            terminalreporter.write_line(f"Deleted schemas ({len(self.deleted)}):", red=True)
            for display, _key in _iter_merged(self.deleted):
                terminalreporter.write_line(f"  - {display}", red=True)

        # Unused (only outside update mode)
        if self.unused and not update_mode:
            terminalreporter.write_line(f"Unused schemas ({len(self.unused)}):")
            for display, _key in _iter_merged(self.unused):
                terminalreporter.write_line(f"  - {display}")
            terminalreporter.write_line("Use --schema-update to delete unused schemas", yellow=True)
# Module-level singleton: the one stats collector shared by `core` and
# `plugin` via `from .stats import GLOBAL_STATS`.
GLOBAL_STATS = SchemaStats()
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import re
|
|
2
|
+
from typing import Optional
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class FormatDetector:
    """Detects well-known string formats (email, uuid, date, ...)."""

    # Regular expressions for the supported formats
    EMAIL_PATTERN = re.compile(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$")
    UUID_PATTERN = re.compile(
        r"^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$",
        re.I,
    )
    DATE_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}$")
    DATETIME_PATTERN = re.compile(
        r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})$"
    )
    URI_PATTERN = re.compile(r"^https?://[^\s/$.?#].[^\s]*$", re.I)
    IPV4_PATTERN = re.compile(
        r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
        r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
    )

    @classmethod
    def detect_format(cls, value: str) -> Optional[str]:
        """
        Detects the format of a string.

        Args:
            value: The string to analyze

        Returns:
            The name of the format or None if the format is not defined
        """
        if not isinstance(value, str) or not value:
            return None

        # Order matters: more specific formats are tried before less
        # specific ones (e.g. date-time before date).
        ordered_checks = (
            (cls.EMAIL_PATTERN, "email"),
            (cls.UUID_PATTERN, "uuid"),
            (cls.DATETIME_PATTERN, "date-time"),
            (cls.DATE_PATTERN, "date"),
            (cls.URI_PATTERN, "uri"),
            (cls.IPV4_PATTERN, "ipv4"),
        )
        for pattern, fmt_name in ordered_checks:
            if pattern.match(value):
                return fmt_name

        return None
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Module for advanced JSON Schema generation with format detection support.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Any, Dict, Optional
|
|
6
|
+
|
|
7
|
+
from genson import SchemaBuilder # type: ignore[import-untyped]
|
|
8
|
+
|
|
9
|
+
from .format_detector import FormatDetector
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class FormatAwareString:
    """genson-style strategy that accumulates detected string formats."""

    def __init__(self) -> None:
        # All distinct formats seen so far
        self.formats: set = set()

    def match_schema(self, obj: Any) -> bool:
        """Returns True when this strategy handles *obj*."""
        return isinstance(obj, str)

    def match_object(self, obj: Any) -> bool:
        """Returns True when this strategy handles *obj*."""
        return isinstance(obj, str)

    def add_object(self, obj: Any) -> None:
        """Records the detected format of *obj*, if any."""
        if not isinstance(obj, str):
            return
        fmt = FormatDetector.detect_format(obj)
        if fmt is not None:
            self.formats.add(fmt)

    def to_schema(self) -> Dict[str, Any]:
        """Builds the string schema; `format` is added only when unambiguous."""
        result: Dict[str, Any] = {"type": "string"}
        # A format is emitted only if every seen string agreed on one
        if len(self.formats) == 1:
            (result["format"],) = self.formats
        return result
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class JsonToSchemaConverter(SchemaBuilder):
    """Extended SchemaBuilder with string-format detection support."""

    def __init__(self, schema_uri: Optional[str] = None):
        """
        Args:
            schema_uri: Forwarded to genson's SchemaBuilder only when truthy;
                otherwise genson's own default is kept (passing None through
                explicitly would change genson's `$schema` handling, which is
                why the branch exists).
        """
        if schema_uri:
            super().__init__(schema_uri)
        else:
            super().__init__()
        # path (e.g. "root.user.email" or "root[]") -> set of detected formats
        self._format_cache: Dict[str, set] = {}

    def add_object(self, obj: Any, path: str = "root") -> None:
        """
        Adds an object to the builder with format detection.

        Args:
            obj: Object to add
            path: Path to the object (for internal use)
        """
        # Let genson collect the structural schema first
        super().add_object(obj)

        # Then record the string formats seen along each path
        self._process_formats(obj, path)

    def _process_formats(self, obj: Any, path: str) -> None:
        """Recursively walks *obj*, caching detected string formats by path."""
        if isinstance(obj, str):
            detected_format = FormatDetector.detect_format(obj)
            if detected_format:
                self._format_cache.setdefault(path, set()).add(detected_format)
        elif isinstance(obj, dict):
            for key, value in obj.items():
                self._process_formats(value, f"{path}.{key}")
        elif isinstance(obj, (list, tuple)):
            # BUG FIX: elements were cached under per-index keys ("root[0]",
            # "root[1]", ...) while the annotation step only consulted index
            # 0 for genson's merged `items` schema. A format seen at index 1+
            # was therefore lost, and a format seen only at index 0 was
            # wrongly applied to all items. Caching every element under one
            # canonical "[]" key makes the cache agree with the merged shape:
            # a format is emitted only if ALL list elements share it.
            for item in obj:
                self._process_formats(item, f"{path}[]")

    def to_schema(self) -> Dict:
        """Generates the schema with detected formats merged in."""
        # Base structural schema from genson
        schema = dict(super().to_schema())

        # Annotate string nodes with `format`
        self._add_formats_to_schema(schema, "root")

        return schema

    def _add_formats_to_schema(self, schema: Dict[str, Any], path: str) -> None:
        """Recursively adds a `format` keyword where exactly one was detected."""
        if schema.get("type") == "string":
            # Annotate only when the format is unambiguous for this path
            cached = self._format_cache.get(path)
            if cached is not None and len(cached) == 1:
                schema["format"] = next(iter(cached))

        elif schema.get("type") == "object" and "properties" in schema:
            # Recurse into the object's properties
            for prop_name, prop_schema in schema["properties"].items():
                self._add_formats_to_schema(prop_schema, f"{path}.{prop_name}")

        elif schema.get("type") == "array" and "items" in schema:
            items = schema["items"]
            if isinstance(items, dict):
                # Merged items schema: use the canonical list path
                self._add_formats_to_schema(items, f"{path}[]")
            elif isinstance(items, list):
                # Defensive: genson normally merges items into one schema,
                # but handle a positional (tuple-style) schema too.
                for item_schema in items:
                    self._add_formats_to_schema(item_schema, f"{path}[]")

        elif "anyOf" in schema:
            # A string alternative inside anyOf shares this path's cache
            for sub_schema in schema["anyOf"]:
                self._add_formats_to_schema(sub_schema, path)
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import inspect
|
|
4
|
+
import re
|
|
5
|
+
import types
|
|
6
|
+
from functools import partial
|
|
7
|
+
from typing import Callable, List, Optional, TypedDict
|
|
8
|
+
|
|
9
|
+
# ──────────────────────────── Типы ────────────────────────────
|
|
10
|
+
# Metadata extracted from a callable, consumed by NameMaker's placeholder
# expansion. The functional TypedDict form is required here: one of the
# keys is "class", a Python keyword, which the class-based syntax cannot
# declare as a field name.
_Meta = TypedDict(
    "_Meta",
    {
        "package": str,  # full dotted module path, e.g. "tests.test_mod"
        "package_full": str,  # same value; joined with a custom separator on demand
        "path_parts": List[str],  # module path segments minus the first one
        "class": Optional[str],  # owning class name, or None for free functions
        "method": str,  # function / method name (last qualname segment)
    },
)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
# ──────────────────────────── Класс ───────────────────────────
|
|
23
|
+
class NameMaker:
|
|
24
|
+
"""
|
|
25
|
+
Lightweight helper that converts a callable into a string identifier
|
|
26
|
+
using a tiny placeholder-based template language (keeps backward
|
|
27
|
+
compatibility with the original test-suite).
|
|
28
|
+
|
|
29
|
+
Supported placeholders
|
|
30
|
+
----------------------
|
|
31
|
+
|
|
32
|
+
`{package}` – full module path (``tests.test_mod``)
|
|
33
|
+
|
|
34
|
+
`{package_full=SEP}` – same but with custom separator (default “.”)
|
|
35
|
+
|
|
36
|
+
`{path} / {path=SEP}` – module path *without* the first segment
|
|
37
|
+
|
|
38
|
+
`{class}` – class name or empty string
|
|
39
|
+
|
|
40
|
+
`{method}` – function / method name
|
|
41
|
+
|
|
42
|
+
`{class_method} / {...=SEP}` – ``Class{SEP}method`` or just ``method``
|
|
43
|
+
|
|
44
|
+
Unknown placeholders collapse to an empty string.
|
|
45
|
+
|
|
46
|
+
After substitution:
|
|
47
|
+
* “//”, “..”, “--” are collapsed to “/”, “.”, “-” respectively;
|
|
48
|
+
* double underscores **are preserved** so ``__call__`` stays intact.
|
|
49
|
+
"""
|
|
50
|
+
|
|
51
|
+
_RE_PLHDR = re.compile(r"\{([^{}]+)\}")
|
|
52
|
+
|
|
53
|
+
# ───────────────────────────── PUBLIC ──────────────────────────────
|
|
54
|
+
@staticmethod
|
|
55
|
+
def format(obj: Callable[..., object], rule: str) -> str:
|
|
56
|
+
"""
|
|
57
|
+
Render *rule* using metadata extracted from *obj*.
|
|
58
|
+
"""
|
|
59
|
+
meta: _Meta = NameMaker._meta(obj)
|
|
60
|
+
|
|
61
|
+
def _sub(match: re.Match[str]) -> str: # noqa: N802
|
|
62
|
+
token: str | None = match.group(1)
|
|
63
|
+
name: str = token.split("=", 1)[0] if token else ""
|
|
64
|
+
joiner: str | None = token.split("=", 1)[1] if token and "=" in token else None
|
|
65
|
+
return NameMaker._expand(name, joiner, meta)
|
|
66
|
+
|
|
67
|
+
out = NameMaker._RE_PLHDR.sub(_sub, rule)
|
|
68
|
+
return NameMaker._collapse(out)
|
|
69
|
+
|
|
70
|
+
# ──────────────────────────── INTERNAL ────────────────────────────
|
|
71
|
+
# metadata ----------------------------------------------------------
|
|
72
|
+
@staticmethod
|
|
73
|
+
def _unwrap(obj: Callable[..., object]) -> Callable[..., object]:
|
|
74
|
+
"""Strip functools.partial and @functools.wraps wrappers."""
|
|
75
|
+
while True:
|
|
76
|
+
if isinstance(obj, partial):
|
|
77
|
+
obj = obj.func
|
|
78
|
+
continue
|
|
79
|
+
if hasattr(obj, "__wrapped__"):
|
|
80
|
+
obj = obj.__wrapped__
|
|
81
|
+
continue
|
|
82
|
+
break
|
|
83
|
+
return obj
|
|
84
|
+
|
|
85
|
+
@staticmethod
|
|
86
|
+
def _meta(obj: Callable[..., object]) -> _Meta:
|
|
87
|
+
"""Return mapping used during placeholder substitution."""
|
|
88
|
+
obj = NameMaker._unwrap(obj)
|
|
89
|
+
|
|
90
|
+
# 1) built-in function (len, sum, …)
|
|
91
|
+
if inspect.isbuiltin(obj) or isinstance(obj, types.BuiltinFunctionType):
|
|
92
|
+
qualname = obj.__name__
|
|
93
|
+
module = obj.__module__ or "builtins"
|
|
94
|
+
|
|
95
|
+
# 2) callable instance (defines __call__)
|
|
96
|
+
elif not (inspect.isfunction(obj) or inspect.ismethod(obj)):
|
|
97
|
+
qualname = f"{obj.__class__.__qualname__}.__call__"
|
|
98
|
+
module = obj.__class__.__module__
|
|
99
|
+
|
|
100
|
+
# 3) regular function / bound or unbound method
|
|
101
|
+
else:
|
|
102
|
+
qualname = obj.__qualname__
|
|
103
|
+
module = obj.__module__
|
|
104
|
+
|
|
105
|
+
parts: List[str] = qualname.split(".")
|
|
106
|
+
cls: Optional[str] = None
|
|
107
|
+
if len(parts) > 1 and parts[-2] != "<locals>":
|
|
108
|
+
cls = parts[-2]
|
|
109
|
+
method = parts[-1]
|
|
110
|
+
|
|
111
|
+
mod_parts = (module or "").split(".")
|
|
112
|
+
return {
|
|
113
|
+
"package": module,
|
|
114
|
+
"package_full": module,
|
|
115
|
+
"path_parts": mod_parts[1:] if len(mod_parts) > 1 else [],
|
|
116
|
+
"class": cls,
|
|
117
|
+
"method": method,
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
# placeholders ------------------------------------------------------
|
|
121
|
+
@staticmethod
|
|
122
|
+
def _expand(name: str, joiner: Optional[str], m: _Meta) -> str:
|
|
123
|
+
if name == "package":
|
|
124
|
+
return m["package"]
|
|
125
|
+
if name == "package_full":
|
|
126
|
+
sep = joiner if joiner is not None else "."
|
|
127
|
+
return sep.join(m["package_full"].split("."))
|
|
128
|
+
if name == "path":
|
|
129
|
+
if not m["path_parts"]:
|
|
130
|
+
return ""
|
|
131
|
+
sep = joiner if joiner is not None else "/"
|
|
132
|
+
return sep.join(m["path_parts"])
|
|
133
|
+
if name == "class":
|
|
134
|
+
return m["class"] or ""
|
|
135
|
+
if name == "method":
|
|
136
|
+
return m["method"]
|
|
137
|
+
if name == "class_method":
|
|
138
|
+
sep = joiner if joiner is not None else "."
|
|
139
|
+
cls_name = m["class"]
|
|
140
|
+
if cls_name:
|
|
141
|
+
return sep.join([cls_name, m["method"]])
|
|
142
|
+
return m["method"]
|
|
143
|
+
# unknown placeholder → empty
|
|
144
|
+
return ""
|
|
145
|
+
|
|
146
|
+
# post-processing ---------------------------------------------------
|
|
147
|
+
@staticmethod
|
|
148
|
+
def _collapse(s: str) -> str:
|
|
149
|
+
# collapse critical duplicates but keep double underscores
|
|
150
|
+
s = re.sub(r"/{2,}", "/", s) # '//' → '/'
|
|
151
|
+
s = re.sub(r"\.{2,}", ".", s) # '..' → '.'
|
|
152
|
+
s = re.sub(r"-{2,}", "-", s) # '--' → '-'
|
|
153
|
+
return s
|
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: pytest-jsonschema-snapshot
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: Pytest plugin for automatic JSON Schema generation and validation from examples
|
|
5
|
+
Project-URL: Homepage, https://miskler.github.io/pytest-jsonschema-snapshot/basic/quick_start.html
|
|
6
|
+
Project-URL: Repository, https://github.com/Miskler/pytest-jsonschema-snapshot
|
|
7
|
+
Project-URL: Documentation, https://miskler.github.io/pytest-jsonschema-snapshot/basic/quick_start.html
|
|
8
|
+
Project-URL: Issues, https://github.com/Miskler/pytest-jsonschema-snapshot/issues
|
|
9
|
+
Author: Miskler
|
|
10
|
+
License: MIT
|
|
11
|
+
License-File: LICENSE
|
|
12
|
+
Keywords: genson,json,pytest,rich,schema,validation
|
|
13
|
+
Classifier: Intended Audience :: Developers
|
|
14
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
+
Classifier: Operating System :: Microsoft :: Windows
|
|
16
|
+
Classifier: Operating System :: POSIX :: Linux
|
|
17
|
+
Classifier: Programming Language :: Python :: 3
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
22
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
23
|
+
Classifier: Topic :: Utilities
|
|
24
|
+
Requires-Python: >=3.10
|
|
25
|
+
Requires-Dist: genson
|
|
26
|
+
Requires-Dist: jsonschema
|
|
27
|
+
Requires-Dist: jsonschema-diff
|
|
28
|
+
Requires-Dist: pathvalidate
|
|
29
|
+
Requires-Dist: pytest
|
|
30
|
+
Provides-Extra: dev
|
|
31
|
+
Requires-Dist: black; extra == 'dev'
|
|
32
|
+
Requires-Dist: docutils; extra == 'dev'
|
|
33
|
+
Requires-Dist: flake8; extra == 'dev'
|
|
34
|
+
Requires-Dist: furo; extra == 'dev'
|
|
35
|
+
Requires-Dist: isort; extra == 'dev'
|
|
36
|
+
Requires-Dist: mypy; extra == 'dev'
|
|
37
|
+
Requires-Dist: pytest-asyncio; extra == 'dev'
|
|
38
|
+
Requires-Dist: pytest-cov; extra == 'dev'
|
|
39
|
+
Requires-Dist: pytest-xdist; extra == 'dev'
|
|
40
|
+
Requires-Dist: sphinx; extra == 'dev'
|
|
41
|
+
Requires-Dist: sphinx-autoapi; extra == 'dev'
|
|
42
|
+
Requires-Dist: types-docutils; extra == 'dev'
|
|
43
|
+
Requires-Dist: types-jsonschema; extra == 'dev'
|
|
44
|
+
Description-Content-Type: text/markdown
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
<div align="center">
|
|
48
|
+
|
|
49
|
+
# 🔍 Pytest JsonSchema SnapShot (JSSS)
|
|
50
|
+
|
|
51
|
+
<img src="https://raw.githubusercontent.com/Miskler/pytest-jsonschema-snapshot/refs/heads/main/assets/logo.png" width="70%" alt="logo.png" />
|
|
52
|
+
|
|
53
|
+
***Plugin for pytest that automatically or manually generates JSON Schemas from test data and validates data against them.***
|
|
54
|
+
|
|
55
|
+
[](https://miskler.github.io/pytest-jsonschema-snapshot/tests/tests-report.html)
|
|
56
|
+
[](https://miskler.github.io/pytest-jsonschema-snapshot/coverage/)
|
|
57
|
+
[](https://python.org)
|
|
58
|
+
[](https://pypi.org/project/pytest-jsonschema-snapshot/)
|
|
59
|
+
[](LICENSE)
|
|
60
|
+
[](https://github.com/psf/black)
|
|
61
|
+
[](https://mypy.readthedocs.io/en/stable/index.html)
|
|
62
|
+
[](https://discord.gg/UnJnGHNbBp)
|
|
63
|
+
[](https://t.me/miskler_dev)
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
**[⭐ Star us on GitHub](https://github.com/Miskler/pytest-jsonschema-snapshot)** | **[📚 Read the Docs](https://miskler.github.io/pytest-jsonschema-snapshot/basic/quick_start.html)** | **[🐛 Report Bug](https://github.com/Miskler/pytest-jsonschema-snapshot/issues)**
|
|
67
|
+
|
|
68
|
+
## ✨ Features
|
|
69
|
+
|
|
70
|
+
</div>
|
|
71
|
+
|
|
72
|
+

|
|
73
|
+
|
|
74
|
+
* Automatic JSON Schema generation from data examples (using the `genson` library).
|
|
75
|
+
* **Format detection**: Automatic detection and validation of string formats (email, UUID, date, date-time, URI, IPv4).
|
|
76
|
+
* Schema storage and management.
|
|
77
|
+
* Validation of data against saved schemas.
|
|
78
|
+
* Schema update via `--schema-update` (create new schemas, remove unused ones, update existing).
|
|
79
|
+
* Support for both `async` and synchronous functions.
|
|
80
|
+
* Support for `Union` types and optional fields.
|
|
81
|
+
* Built-in diff comparison of changes via [jsonschema-diff](https://github.com/Miskler/jsonschema-diff).
|
|
82
|
+
|
|
83
|
+
<div align="center">
|
|
84
|
+
|
|
85
|
+
## 🚀 Quick Start
|
|
86
|
+
|
|
87
|
+
</div>
|
|
88
|
+
|
|
89
|
+
### Installation
|
|
90
|
+
|
|
91
|
+
```bash
|
|
92
|
+
pip install pytest-jsonschema-snapshot
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
### Usage
|
|
96
|
+
|
|
97
|
+
1. Use the `schemashot` fixture in your tests
|
|
98
|
+
```python
|
|
99
|
+
from your_lib import API
|
|
100
|
+
from pytest_jsonschema_snapshot import SchemaShot
|
|
101
|
+
|
|
102
|
+
@pytest.mark.asyncio
|
|
103
|
+
async def test_something(schemashot: SchemaShot):
|
|
104
|
+
data = await API.get_data()
|
|
105
|
+
# There are data - need to validate through the schema
|
|
106
|
+
schemashot.assert_json_match(
|
|
107
|
+
data, # data for validation / convert to schema
|
|
108
|
+
"test_name" # name of the schema
|
|
109
|
+
)
|
|
110
|
+
|
|
111
|
+
schema = await API.get_schema()
|
|
112
|
+
# There is a schema (data is optional) - validate by what is
|
|
113
|
+
schemashot.assert_schema_match(
|
|
114
|
+
schema,
|
|
115
|
+
(API.get_schema, "test_name", 1),  # == `API.get_schema.test_name.1` filename
|
|
116
|
+
data=data # data for validation (optional)
|
|
117
|
+
)
|
|
118
|
+
```
|
|
119
|
+
|
|
120
|
+
2. On first run, generate schemas with the `--schema-update` flag
|
|
121
|
+
```bash
|
|
122
|
+
pytest --schema-update --save-original
|
|
123
|
+
```
|
|
124
|
+
|
|
125
|
+
**--save-original**: save the original data on which the validation was performed. Saving occurs when `--schema-update`, if you run the schema update without this attribute, the old original data will be deleted without saving new ones.
|
|
126
|
+
|
|
127
|
+
3. On subsequent runs, tests will validate data against saved schemas
|
|
128
|
+
```bash
|
|
129
|
+
pytest
|
|
130
|
+
```
|
|
131
|
+
|
|
132
|
+
<div align="center">
|
|
133
|
+
|
|
134
|
+
## 👀 Key Capabilities
|
|
135
|
+
|
|
136
|
+
</div>
|
|
137
|
+
|
|
138
|
+
* **Union Types**: support multiple possible types for fields
|
|
139
|
+
* **Optional Fields**: automatic detection of required and optional fields
|
|
140
|
+
* **Format Detection**: automatic detection of string formats including:
|
|
141
|
+
|
|
142
|
+
| Format | Example | JSON Schema |
|
|
143
|
+
| --- | --- | --- |
|
|
144
|
+
| Email | `user@example.com` | `{"format": "email"}` |
|
|
145
|
+
| UUID | `550e8400-e29b-41d4-a716-446655440000` | `{"format": "uuid"}` |
|
|
146
|
+
| Date | `2023-01-15` | `{"format": "date"}` |
|
|
147
|
+
| Date-Time | `2023-01-01T12:00:00Z` | `{"format": "date-time"}` |
|
|
148
|
+
| URI | `https://example.com` | `{"format": "uri"}` |
|
|
149
|
+
| IPv4 | `192.168.1.1` | `{"format": "ipv4"}` |
|
|
150
|
+
* **Cleanup**: automatic removal of unused schemas when running in update mode
|
|
151
|
+
* **Schema Summary**: colored terminal output showing created, updated, deleted and unused schemas
|
|
152
|
+
|
|
153
|
+
## Advanced Usage? Check the [docs](https://miskler.github.io/pytest-jsonschema-snapshot/basic/quick_start.html#then-you-need-to-configure-the-library)!
|
|
154
|
+
|
|
155
|
+
### Best Practices
|
|
156
|
+
|
|
157
|
+
1. **Commit schemas to version control**: Schemas should be part of your repository
|
|
158
|
+
2. **Review schema changes**: When schemas change, review the diffs carefully before running `--schema-update`, since that flag resets them.
|
|
159
|
+
3. **Clean up regularly**: Use `--schema-update` periodically to remove unused schemas
|
|
160
|
+
4. **Descriptive names**: Use clear, descriptive names for your schemas
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
<div align="center">
|
|
164
|
+
|
|
165
|
+
## 🤝 Contributing
|
|
166
|
+
|
|
167
|
+
### ***We welcome contributions!***
|
|
168
|
+
|
|
169
|
+
### Quick Contribution Setup
|
|
170
|
+
|
|
171
|
+
</div>
|
|
172
|
+
|
|
173
|
+
```bash
|
|
174
|
+
# Fork the repo, then:
|
|
175
|
+
git clone https://github.com/Miskler/pytest-jsonschema-snapshot.git
|
|
176
|
+
cd pytest-jsonschema-snapshot
|
|
177
|
+
# Install
|
|
178
|
+
make reinstall
|
|
179
|
+
# Ensure everything works
|
|
180
|
+
make test
|
|
181
|
+
make lint
|
|
182
|
+
make type-check
|
|
183
|
+
# After code editing
|
|
184
|
+
make format
|
|
185
|
+
```
|
|
186
|
+
|
|
187
|
+
<div align="center">
|
|
188
|
+
|
|
189
|
+
## 📄 License
|
|
190
|
+
|
|
191
|
+
MIT License - see [LICENSE](LICENSE) file for details.
|
|
192
|
+
|
|
193
|
+
*Made with ❤️ for developers working with evolving JSON schemas*
|
|
194
|
+
|
|
195
|
+
</div>
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
pytest_jsonschema_snapshot/__init__.py,sha256=_DWc4aUyqwppG2WR6nwnao8wmt2PYdkPmf9jjoYSSLM,385
|
|
2
|
+
pytest_jsonschema_snapshot/core.py,sha256=DoFQtphueFy-lkVuZ6tNJ146QqPskpLZm2EDcy5HDDw,9498
|
|
3
|
+
pytest_jsonschema_snapshot/plugin.py,sha256=_NUXU7RdKeVltzSnYkru5MSrbm7auHeepuCEpJ2FN3Q,6829
|
|
4
|
+
pytest_jsonschema_snapshot/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
5
|
+
pytest_jsonschema_snapshot/stats.py,sha256=XGGzHY0ytMFOkFpnqNAK1DpV9iI0_fZPWrVvHFNFL3g,7943
|
|
6
|
+
pytest_jsonschema_snapshot/tools/__init__.py,sha256=WMS6PdgMABBfTRhPGuoUOXB-R2PcqcadwH8pG1C6MFU,132
|
|
7
|
+
pytest_jsonschema_snapshot/tools/name_maker.py,sha256=tqss8NCGSo2aQX_-RkCJzy3NJx_TDA-xrn8qsblecf0,5799
|
|
8
|
+
pytest_jsonschema_snapshot/tools/genson_addon/__init__.py,sha256=nANkqHTaWTZPwBDztsnQvObHUZLSeHenJS--oWfep8c,92
|
|
9
|
+
pytest_jsonschema_snapshot/tools/genson_addon/format_detector.py,sha256=Wc5pB_xstyr4OtjwJ2qqmV62xET63cN7Nb0gxkrYyW0,1636
|
|
10
|
+
pytest_jsonschema_snapshot/tools/genson_addon/to_schema_converter.py,sha256=cMjvTydaD1xLnGpby1Mlp3LVzZgZLXdCCUKYcU4y4TU,4364
|
|
11
|
+
pytest_jsonschema_snapshot-0.2.0.dist-info/METADATA,sha256=hJWbUjB-N1dwj8XeAOmnycurpDKdBXJPwLkYijVn6qw,7569
|
|
12
|
+
pytest_jsonschema_snapshot-0.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
13
|
+
pytest_jsonschema_snapshot-0.2.0.dist-info/entry_points.txt,sha256=eJ1x4TMmhcc8YtM7IoCsUJO4-rLeTrGy8tPgkrojjKs,58
|
|
14
|
+
pytest_jsonschema_snapshot-0.2.0.dist-info/licenses/LICENSE,sha256=1HRFdSzlJ4BtHv6U7tZun3iCArjbCnm5NUowE9hZpNs,1071
|
|
15
|
+
pytest_jsonschema_snapshot-0.2.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Open Inflation
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|