db-test-helpers 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- db_test_helpers-0.1.0/.gitignore +13 -0
- db_test_helpers-0.1.0/PKG-INFO +36 -0
- db_test_helpers-0.1.0/README.md +13 -0
- db_test_helpers-0.1.0/pyproject.toml +39 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/__init__.py +8 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_errors.py +5 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_integration_test.py +164 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_matcher_types.py +69 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_matchers.py +61 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_matchers_test.py +146 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_parse.py +73 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_parse_test.py +145 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_parser.py +174 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_parser_test.py +120 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_set.py +75 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_set_test.py +256 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_types.py +44 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_verify.py +206 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/_verify_test.py +497 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/fixtures/expected_basic.yaml +18 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/fixtures/expected_children.yaml +15 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/fixtures/expected_with_any.yaml +5 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/fixtures/expected_with_matchers.yaml +18 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/fixtures/expected_with_var.yaml +4 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/fixtures/seed_basic.yaml +18 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/fixtures/seed_children.yaml +15 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/fixtures/seed_with_any.yaml +3 -0
- db_test_helpers-0.1.0/src/db_test_helpers/core/fixtures/seed_with_var.yaml +4 -0
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: db-test-helpers
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: YAML-driven database test data setup and verification helpers
|
|
5
|
+
Project-URL: Homepage, https://github.com/takaaa220/db-test-helpers
|
|
6
|
+
Project-URL: Repository, https://github.com/takaaa220/db-test-helpers
|
|
7
|
+
Project-URL: Issues, https://github.com/takaaa220/db-test-helpers/issues
|
|
8
|
+
Author: takaaa220
|
|
9
|
+
License-Expression: MIT
|
|
10
|
+
Keywords: database,fixtures,test-helpers,testing,yaml
|
|
11
|
+
Classifier: Development Status :: 3 - Alpha
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
16
|
+
Classifier: Topic :: Software Development :: Testing
|
|
17
|
+
Requires-Python: >=3.12
|
|
18
|
+
Requires-Dist: pyyaml>=6.0
|
|
19
|
+
Provides-Extra: dev
|
|
20
|
+
Requires-Dist: pytest>=8.0; extra == 'dev'
|
|
21
|
+
Requires-Dist: ruff>=0.8; extra == 'dev'
|
|
22
|
+
Description-Content-Type: text/markdown
|
|
23
|
+
|
|
24
|
+
# db-test-helpers
|
|
25
|
+
|
|
26
|
+
YAML-driven database test data setup and verification helpers. The core is DB-agnostic; DB-specific operations are handled by adapters.
|
|
27
|
+
|
|
28
|
+
## Installation
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
pip install db-test-helpers
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
## Documentation
|
|
35
|
+
|
|
36
|
+
See the [main repository](https://github.com/takaaa220/db-test-helpers) for usage and documentation.
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# db-test-helpers
|
|
2
|
+
|
|
3
|
+
YAML-driven database test data setup and verification helpers. The core is DB-agnostic; DB-specific operations are handled by adapters.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pip install db-test-helpers
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Documentation
|
|
12
|
+
|
|
13
|
+
See the [main repository](https://github.com/takaaa220/db-test-helpers) for usage and documentation.
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "db-test-helpers"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "YAML-driven database test data setup and verification helpers"
|
|
5
|
+
license = "MIT"
|
|
6
|
+
readme = "README.md"
|
|
7
|
+
authors = [{ name = "takaaa220" }]
|
|
8
|
+
keywords = ["testing", "database", "yaml", "test-helpers", "fixtures"]
|
|
9
|
+
classifiers = [
|
|
10
|
+
"Development Status :: 3 - Alpha",
|
|
11
|
+
"Intended Audience :: Developers",
|
|
12
|
+
"License :: OSI Approved :: MIT License",
|
|
13
|
+
"Programming Language :: Python :: 3",
|
|
14
|
+
"Programming Language :: Python :: 3.12",
|
|
15
|
+
"Topic :: Software Development :: Testing",
|
|
16
|
+
]
|
|
17
|
+
requires-python = ">=3.12"
|
|
18
|
+
dependencies = [
|
|
19
|
+
"pyyaml>=6.0",
|
|
20
|
+
]
|
|
21
|
+
|
|
22
|
+
[project.urls]
|
|
23
|
+
Homepage = "https://github.com/takaaa220/db-test-helpers"
|
|
24
|
+
Repository = "https://github.com/takaaa220/db-test-helpers"
|
|
25
|
+
Issues = "https://github.com/takaaa220/db-test-helpers/issues"
|
|
26
|
+
|
|
27
|
+
[project.optional-dependencies]
|
|
28
|
+
dev = [
|
|
29
|
+
"pytest>=8.0",
|
|
30
|
+
"ruff>=0.8",
|
|
31
|
+
]
|
|
32
|
+
|
|
33
|
+
[build-system]
|
|
34
|
+
requires = ["hatchling"]
|
|
35
|
+
build-backend = "hatchling.build"
|
|
36
|
+
|
|
37
|
+
[tool.hatch.build.targets.wheel]
|
|
38
|
+
only-include = ["src/db_test_helpers/core"]
|
|
39
|
+
sources = ["src"]
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
"""db-test-helpers: DB-agnostic test data helpers."""
|
|
2
|
+
|
|
3
|
+
from db_test_helpers.core._errors import UnsupportedError
|
|
4
|
+
from db_test_helpers.core._set import set_db_data
|
|
5
|
+
from db_test_helpers.core._types import Config, DatabaseAdapter, HierarchicalAdapter
|
|
6
|
+
from db_test_helpers.core._verify import verify_db_data
|
|
7
|
+
|
|
8
|
+
# Public names re-exported by ``from db_test_helpers.core import *``.
__all__ = ["Config", "DatabaseAdapter", "HierarchicalAdapter", "UnsupportedError", "set_db_data", "verify_db_data"]
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
"""Integration tests using YAML fixture files with a mock in-memory adapter."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
import pytest
|
|
9
|
+
|
|
10
|
+
from db_test_helpers.core import Config, set_db_data, verify_db_data
|
|
11
|
+
|
|
12
|
+
# Directory holding the YAML seed/expected fixture files used by these tests.
FIXTURES_DIR = str(Path(__file__).parent / "fixtures")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class InMemoryAdapter:
    """In-memory adapter that stores records keyed by collection path."""

    def __init__(self):
        # Maps a collection path (e.g. "users/u1/posts") to its stored records.
        self._store: dict[str, list[dict[str, Any]]] = {}
        self._auto_id_counter = 0

    def _collection(self, group: str, parent_path: str) -> str:
        # Join the parent path and group name into a single collection path.
        return f"{parent_path}/{group}" if parent_path else group

    def clear_group(self, group: str, parent_path: str) -> None:
        """Remove the collection and, recursively, everything nested under it."""
        root = self._collection(group, parent_path)
        prefix = root + "/"
        stale = [key for key in self._store if key == root or key.startswith(prefix)]
        for key in stale:
            del self._store[key]

    def write_record(self, group: str, parent_path: str, data: dict[str, Any]) -> dict[str, Any]:
        """Store *data* in the collection, generating an auto ID when none is given.

        Returns the stored record, including its ``__document_id__``.
        """
        payload = dict(data)  # never mutate the caller's dict
        doc_id = payload.pop("__document_id__", None)
        if doc_id is None:
            self._auto_id_counter += 1
            doc_id = f"auto-{self._auto_id_counter}"
        stored = {"__document_id__": doc_id, **payload}
        self._store.setdefault(self._collection(group, parent_path), []).append(stored)
        return stored

    def read_records(self, group: str, parent_path: str) -> list[dict[str, Any]]:
        """Return a shallow copy of the records stored in the collection."""
        return list(self._store.get(self._collection(group, parent_path), []))

    def build_child_path(self, parent_path: str, group: str, record: dict[str, Any]) -> str:
        """Return the path identifying *record* within its collection."""
        return f"{self._collection(group, parent_path)}/{record['__document_id__']}"
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _fixture(name: str) -> str:
    """Build the full path of a fixture file inside ``FIXTURES_DIR``."""
    return "/".join((FIXTURES_DIR, name))
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class TestIntegrationBasic:
    """End-to-end set/verify round-trips over the basic fixtures."""

    def test_set_and_verify_basic(self):
        """seed_basic.yaml -> expected_basic.yaml (different order, nested dict/list)"""
        adapter = InMemoryAdapter()
        set_db_data(adapter, _fixture("seed_basic.yaml"))
        verify_db_data(adapter, _fixture("expected_basic.yaml"))

    def test_set_clears_existing_data(self):
        """Seeding twice must not duplicate records."""
        adapter = InMemoryAdapter()
        set_db_data(adapter, _fixture("seed_basic.yaml"))
        # Second set should clear existing data
        set_db_data(adapter, _fixture("seed_basic.yaml"))
        records = adapter.read_records("users", "")
        assert len(records) == 2
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class TestIntegrationVar:
    """End-to-end behaviour of $var() variable expansion."""

    def test_set_and_verify_with_var(self):
        """$var() expansion and verification"""
        adapter = InMemoryAdapter()
        config = Config(variables={"uid": "test-user-123", "username": "Alice"})
        set_db_data(adapter, _fixture("seed_with_var.yaml"), config)
        verify_db_data(adapter, _fixture("expected_with_var.yaml"), config)

    def test_var_expansion_actually_stored(self):
        """$var() should be expanded to the actual value when stored"""
        adapter = InMemoryAdapter()
        config = Config(variables={"uid": "test-user-123", "username": "Alice"})
        set_db_data(adapter, _fixture("seed_with_var.yaml"), config)
        records = adapter.read_records("users", "")
        assert len(records) == 1
        assert records[0]["__document_id__"] == "test-user-123"
        assert records[0]["name"] == "Alice"
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
class TestIntegrationAny:
    """End-to-end behaviour of auto-generated IDs and the $any() wildcard."""

    def test_set_and_verify_with_any(self):
        """Omitted __document_id__ gets auto ID + verify with $any() wildcard"""
        adapter = InMemoryAdapter()
        set_db_data(adapter, _fixture("seed_with_any.yaml"))
        verify_db_data(adapter, _fixture("expected_with_any.yaml"))

    def test_omitted_id_generates_auto_id(self):
        """Omitting __document_id__ should make the adapter generate an auto ID"""
        adapter = InMemoryAdapter()
        set_db_data(adapter, _fixture("seed_with_any.yaml"))
        records = adapter.read_records("users", "")
        assert len(records) == 2
        for r in records:
            assert r["__document_id__"].startswith("auto-")
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
class TestIntegrationChildren:
    """End-to-end behaviour of nested __children__ groups."""

    def test_set_and_verify_children(self):
        """Recursive set + verify of __children__ (multi-level, different order)"""
        adapter = InMemoryAdapter()
        set_db_data(adapter, _fixture("seed_children.yaml"))
        verify_db_data(adapter, _fixture("expected_children.yaml"))

    def test_children_data_structure(self):
        """__children__ data should be stored at the correct paths"""
        adapter = InMemoryAdapter()
        set_db_data(adapter, _fixture("seed_children.yaml"))

        users = adapter.read_records("users", "")
        assert len(users) == 1
        assert users[0]["name"] == "Alice"

        posts = adapter.read_records("posts", "users/user-1")
        assert len(posts) == 2

        comments = adapter.read_records("comments", "users/user-1/posts/post-1")
        assert len(comments) == 2
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
class TestIntegrationMatchers:
    """End-to-end verification using matcher DSL expressions."""

    def test_verify_with_matchers(self):
        """seed_basic -> expected_with_matchers (verify using DSL matchers)"""
        adapter = InMemoryAdapter()
        set_db_data(adapter, _fixture("seed_basic.yaml"))
        verify_db_data(adapter, _fixture("expected_with_matchers.yaml"))

    def test_verify_failure_with_wrong_expected(self, tmp_path):
        """Verification should fail with wrong expected data"""
        adapter = InMemoryAdapter()
        set_db_data(adapter, _fixture("seed_basic.yaml"))

        wrong = tmp_path / "wrong.yaml"
        wrong.write_text(
            """\
users:
  - __document_id__: "user-1"
    name: "Charlie"
    age: 30
    tags:
      - "admin"
      - "user"
    profile:
      city: "Tokyo"
      zip: "100-0001"
  - __document_id__: "user-2"
    name: "Bob"
    age: 25
    tags:
      - "user"
    profile:
      city: "Osaka"
      zip: "530-0001"
"""
        )
        with pytest.raises(AssertionError):
            verify_db_data(adapter, str(wrong))
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"""Matcher Protocol and implementations."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Protocol, runtime_checkable
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@runtime_checkable
class Matcher(Protocol):
    """Structural interface: anything exposing ``matches(value) -> bool``."""

    def matches(self, value: object) -> bool: ...
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class AnyMatcher:
    """Wildcard matcher that accepts every value unconditionally."""

    def matches(self, value: object) -> bool:
        """Always return True, regardless of *value*."""
        return True
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class EqMatcher:
    """Matches values strictly equal to an expected value, type included."""

    def __init__(self, expected: object) -> None:
        self._expected = expected

    def matches(self, value: object) -> bool:
        """True only when *value* has the exact type of and equals the expected value."""
        # Exact type() comparison (not isinstance) deliberately rejects
        # cross-type equalities such as 1 == 1.0 or True == 1.
        return type(value) is type(self._expected) and value == self._expected
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class LtMatcher:
|
|
29
|
+
def __init__(self, threshold: int | float) -> None:
|
|
30
|
+
self._threshold = threshold
|
|
31
|
+
|
|
32
|
+
def matches(self, value: object) -> bool:
|
|
33
|
+
if isinstance(value, bool) or not isinstance(value, (int, float)):
|
|
34
|
+
return False
|
|
35
|
+
return value < self._threshold
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class GtMatcher:
|
|
39
|
+
def __init__(self, threshold: int | float) -> None:
|
|
40
|
+
self._threshold = threshold
|
|
41
|
+
|
|
42
|
+
def matches(self, value: object) -> bool:
|
|
43
|
+
if isinstance(value, bool) or not isinstance(value, (int, float)):
|
|
44
|
+
return False
|
|
45
|
+
return value > self._threshold
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class NotMatcher:
    """Inverts the verdict of a wrapped matcher."""

    def __init__(self, inner: "Matcher") -> None:
        self._inner = inner

    def matches(self, value: object) -> bool:
        """True exactly when the wrapped matcher rejects *value*."""
        return not self._inner.matches(value)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class AndMatcher:
    """Matches only when every child matcher accepts the value."""

    def __init__(self, matchers: "list[Matcher]") -> None:
        self._matchers = matchers

    def matches(self, value: object) -> bool:
        """True when all children match (vacuously True for an empty list)."""
        for matcher in self._matchers:
            if not matcher.matches(value):
                return False
        return True
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class OrMatcher:
    """Matches when at least one child matcher accepts the value."""

    def __init__(self, matchers: "list[Matcher]") -> None:
        self._matchers = matchers

    def matches(self, value: object) -> bool:
        """True when any child matches (False for an empty list)."""
        for matcher in self._matchers:
            if matcher.matches(value):
                return True
        return False
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
"""Matcher DSL: public API facade."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import re
|
|
6
|
+
|
|
7
|
+
from db_test_helpers.core._matcher_types import (
|
|
8
|
+
AndMatcher,
|
|
9
|
+
AnyMatcher,
|
|
10
|
+
EqMatcher,
|
|
11
|
+
GtMatcher,
|
|
12
|
+
LtMatcher,
|
|
13
|
+
Matcher,
|
|
14
|
+
NotMatcher,
|
|
15
|
+
OrMatcher,
|
|
16
|
+
)
|
|
17
|
+
from db_test_helpers.core._parser import parse
|
|
18
|
+
|
|
19
|
+
# Public surface of the matcher DSL module.
__all__ = [
    "AndMatcher",
    "AnyMatcher",
    "EqMatcher",
    "GtMatcher",
    "LtMatcher",
    "Matcher",
    "NotMatcher",
    "OrMatcher",
    "is_dsl_string",
    "is_var_dsl",
    "parse_matcher",
    "resolve_var",
]
|
|
33
|
+
|
|
34
|
+
# A DSL expression opens with "$<lowercase name>(" — e.g. "$eq(", "$any(".
_DSL_PATTERN = re.compile(r"^\$([a-z]+)\(")
# A complete "$var(<name>)" expression; group 1 captures the variable name.
_VAR_PATTERN = re.compile(r"^\$var\(([^)]+)\)$")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def is_dsl_string(value: object) -> bool:
    """Return True when *value* is a string opening a DSL call such as ``$eq(``."""
    return isinstance(value, str) and _DSL_PATTERN.match(value) is not None
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def is_var_dsl(value: object) -> bool:
    """Return True when *value* is exactly a ``$var(name)`` expression."""
    return isinstance(value, str) and _VAR_PATTERN.match(value) is not None
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def resolve_var(value: str, variables: dict[str, object]) -> object:
    """Look up the variable named inside a ``$var(name)`` expression.

    Raises:
        ValueError: If *value* is not a ``$var()`` expression, or the named
            variable is missing from *variables*.
    """
    matched = _VAR_PATTERN.match(value)
    if matched is None:
        raise ValueError(f"Not a $var() expression: {value!r}")
    name = matched.group(1).strip()
    if name in variables:
        return variables[name]
    raise ValueError(f"Undefined variable: {name!r}")
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def parse_matcher(value: str, variables: dict[str, object]) -> Matcher:
    """Parse a DSL string into a Matcher, resolving ``$var()`` via *variables*."""
    return parse(value, variables)
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
"""Tests for _matchers module."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import pytest
|
|
6
|
+
|
|
7
|
+
from db_test_helpers.core._matchers import (
|
|
8
|
+
AnyMatcher,
|
|
9
|
+
AndMatcher,
|
|
10
|
+
EqMatcher,
|
|
11
|
+
GtMatcher,
|
|
12
|
+
LtMatcher,
|
|
13
|
+
NotMatcher,
|
|
14
|
+
OrMatcher,
|
|
15
|
+
is_dsl_string,
|
|
16
|
+
is_var_dsl,
|
|
17
|
+
resolve_var,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
# --- is_dsl_string ---
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class TestIsDslString:
    """Tests for is_dsl_string()."""

    def test_valid_dsl(self):
        """Every supported $fn(...) form is recognized as DSL."""
        assert is_dsl_string("$eq(hello)") is True
        assert is_dsl_string("$lt(100)") is True
        assert is_dsl_string("$gt(0)") is True
        assert is_dsl_string("$not($eq(x))") is True
        assert is_dsl_string("$and($gt(0),$lt(10))") is True
        assert is_dsl_string("$or($eq(a),$eq(b))") is True
        assert is_dsl_string("$var(uid)") is True
        assert is_dsl_string("$any()") is True

    def test_unknown_dsl_function(self):
        """Detection is purely syntactic: unknown function names still count."""
        assert is_dsl_string("$unknown(x)") is True
        assert is_dsl_string("$foo(bar)") is True

    def test_non_dsl_string(self):
        """Plain strings are not DSL."""
        assert is_dsl_string("hello") is False
        assert is_dsl_string("") is False

    def test_non_string(self):
        """Non-string values are never DSL."""
        assert is_dsl_string(123) is False
        assert is_dsl_string(None) is False
        assert is_dsl_string(True) is False
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
# --- is_var_dsl ---
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class TestIsVarDsl:
    """Tests for is_var_dsl()."""

    def test_var(self):
        """A complete $var(name) expression is recognized."""
        assert is_var_dsl("$var(uid)") is True

    def test_not_var(self):
        """Other DSL calls, plain strings, and non-strings are rejected."""
        assert is_var_dsl("$eq(x)") is False
        assert is_var_dsl("$any()") is False
        assert is_var_dsl("hello") is False
        assert is_var_dsl(123) is False
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
# --- Matchers ---
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class TestAnyMatcher:
    """Tests for AnyMatcher."""

    def test_matches_anything(self):
        """AnyMatcher accepts values of every type."""
        m = AnyMatcher()
        assert m.matches("hello") is True
        assert m.matches(123) is True
        assert m.matches(None) is True
        assert m.matches(True) is True
        assert m.matches([1, 2]) is True
        assert m.matches({"a": 1}) is True
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class TestEqMatcher:
    """Tests for EqMatcher."""

    def test_matches_same_type_and_value(self):
        assert EqMatcher("hello").matches("hello") is True
        assert EqMatcher(42).matches(42) is True

    def test_rejects_different_value(self):
        assert EqMatcher("hello").matches("world") is False

    def test_rejects_different_type(self):
        # Strict type equality: int 1 matches neither float 1.0 nor string "1".
        assert EqMatcher(1).matches(1.0) is False
        assert EqMatcher("1").matches(1) is False
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
class TestLtMatcher:
    """Tests for LtMatcher."""

    def test_less_than(self):
        """Strictly-less comparison: the threshold itself does not match."""
        assert LtMatcher(100).matches(50) is True
        assert LtMatcher(100).matches(100) is False
        assert LtMatcher(100).matches(150) is False

    def test_rejects_non_number(self):
        """Strings and bools are not treated as numbers."""
        assert LtMatcher(100).matches("50") is False
        assert LtMatcher(100).matches(True) is False
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
class TestGtMatcher:
    """Tests for GtMatcher."""

    def test_greater_than(self):
        """Strictly-greater comparison: the threshold itself does not match."""
        assert GtMatcher(0).matches(1) is True
        assert GtMatcher(0).matches(0) is False
        assert GtMatcher(0).matches(-1) is False

    def test_rejects_non_number(self):
        """Strings and bools are not treated as numbers."""
        assert GtMatcher(0).matches("1") is False
        assert GtMatcher(0).matches(False) is False
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
class TestNotMatcher:
    """Tests for NotMatcher."""

    def test_negates_inner(self):
        assert NotMatcher(EqMatcher("admin")).matches("user") is True
        assert NotMatcher(EqMatcher("admin")).matches("admin") is False
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
class TestAndMatcher:
    """Tests for AndMatcher."""

    def test_all_must_match(self):
        # Open interval (0, 100): both endpoints are excluded.
        m = AndMatcher([GtMatcher(0), LtMatcher(100)])
        assert m.matches(50) is True
        assert m.matches(0) is False
        assert m.matches(100) is False
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
class TestOrMatcher:
    """Tests for OrMatcher."""

    def test_any_must_match(self):
        m = OrMatcher([EqMatcher("active"), EqMatcher("pending")])
        assert m.matches("active") is True
        assert m.matches("pending") is True
        assert m.matches("deleted") is False
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
# --- resolve_var ---
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
class TestResolveVar:
    """Tests for resolve_var()."""

    def test_resolves_var(self):
        assert resolve_var("$var(uid)", {"uid": "abc-123"}) == "abc-123"

    def test_non_string_variable(self):
        """Variable values keep their original (non-string) type."""
        assert resolve_var("$var(count)", {"count": 42}) == 42

    def test_undefined_variable_raises(self):
        with pytest.raises(ValueError, match="Undefined variable"):
            resolve_var("$var(missing)", {})
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
"""YAML parser with __children__ separation."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import yaml
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@dataclass
class ParsedRecord:
    """One record parsed from YAML, with nested child groups split out."""

    # Regular field values (everything except the reserved "__children__" key).
    fields: dict[str, Any]
    # Nested groups (child group name -> records), or None when absent.
    children: dict[str, list[ParsedRecord]] | None
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass
class ParsedGroup:
    """A named top-level group and its parsed records."""

    # Group (collection) name taken from the YAML top-level key.
    name: str
    # Parsed records in file order.
    records: list[ParsedRecord]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def parse_yaml(filepath: str | Path) -> list[ParsedGroup]:
    """Parse a YAML fixture file into a list of groups.

    The YAML top-level must be a dict mapping group names to lists of
    records. The reserved key ``__children__`` is separated from regular
    fields and stored in ``ParsedRecord.children``.

    Args:
        filepath: Path to the YAML fixture file.

    Raises:
        ValueError: If the file is empty, not a dict, or has invalid
            structure.
    """
    with open(filepath) as fh:
        raw = yaml.safe_load(fh)

    if raw is None:
        raise ValueError("YAML file is empty")
    if not isinstance(raw, dict):
        raise ValueError(f"YAML top-level must be a dict, got {type(raw).__name__}")

    parsed: list[ParsedGroup] = []
    for group_name, entries in raw.items():
        if not isinstance(entries, list):
            raise ValueError(f"Group '{group_name}' must be a list, got {type(entries).__name__}")
        records = [_parse_record(group_name, i, entry) for i, entry in enumerate(entries)]
        parsed.append(ParsedGroup(name=group_name, records=records))

    return parsed
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def _parse_record(group_name: str, index: int, data: Any) -> ParsedRecord:
|
|
57
|
+
if not isinstance(data, dict):
|
|
58
|
+
raise ValueError(f"Record {group_name}[{index}] must be a dict, got {type(data).__name__}")
|
|
59
|
+
|
|
60
|
+
children_raw = data.get("__children__")
|
|
61
|
+
fields = {k: v for k, v in data.items() if k != "__children__"}
|
|
62
|
+
|
|
63
|
+
children: dict[str, list[ParsedRecord]] | None = None
|
|
64
|
+
if children_raw is not None:
|
|
65
|
+
if not isinstance(children_raw, dict):
|
|
66
|
+
raise ValueError(
|
|
67
|
+
f"Record {group_name}[{index}].__children__ must be a dict, got {type(children_raw).__name__}"
|
|
68
|
+
)
|
|
69
|
+
children = {}
|
|
70
|
+
for child_group, child_records in children_raw.items():
|
|
71
|
+
children[child_group] = [_parse_record(child_group, i, r) for i, r in enumerate(child_records)]
|
|
72
|
+
|
|
73
|
+
return ParsedRecord(fields=fields, children=children)
|