data-sitter 0.1.4__tar.gz → 0.1.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. data_sitter-0.1.6/PKG-INFO +220 -0
  2. {data_sitter-0.1.4 → data_sitter-0.1.6}/README.md +81 -17
  3. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/Contract.py +23 -31
  4. data_sitter-0.1.6/data_sitter/FieldResolver.py +62 -0
  5. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/cli.py +1 -1
  6. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/field_types/BaseField.py +24 -15
  7. data_sitter-0.1.6/data_sitter/field_types/FieldTypes.py +9 -0
  8. data_sitter-0.1.6/data_sitter/field_types/FloatField.py +26 -0
  9. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/field_types/IntegerField.py +2 -0
  10. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/field_types/NumericField.py +11 -9
  11. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/field_types/StringField.py +15 -13
  12. data_sitter-0.1.6/data_sitter/rules/Enums.py +7 -0
  13. data_sitter-0.1.6/data_sitter/rules/LogicalRule.py +68 -0
  14. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/rules/MatchedRule.py +17 -14
  15. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/rules/Parser/alias_parameters_parser.py +0 -20
  16. data_sitter-0.1.6/data_sitter/rules/ProcessedRule.py +24 -0
  17. data_sitter-0.1.6/data_sitter/rules/RuleRegistry.py +86 -0
  18. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/rules/__init__.py +7 -1
  19. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/utils/logger_config.py +1 -1
  20. data_sitter-0.1.6/data_sitter.egg-info/PKG-INFO +220 -0
  21. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter.egg-info/SOURCES.txt +9 -2
  22. data_sitter-0.1.6/data_sitter.egg-info/requires.txt +11 -0
  23. {data_sitter-0.1.4 → data_sitter-0.1.6}/pyproject.toml +12 -2
  24. data_sitter-0.1.6/tests/test_cli.py +177 -0
  25. data_sitter-0.1.6/tests/test_contract.py +165 -0
  26. data_sitter-0.1.6/tests/test_field_resolver.py +159 -0
  27. data_sitter-0.1.6/tests/test_validation.py +130 -0
  28. data_sitter-0.1.4/PKG-INFO +0 -9
  29. data_sitter-0.1.4/data_sitter/FieldResolver.py +0 -49
  30. data_sitter-0.1.4/data_sitter/field_types/FloatField.py +0 -17
  31. data_sitter-0.1.4/data_sitter/rules/RuleRegistry.py +0 -65
  32. data_sitter-0.1.4/data_sitter.egg-info/PKG-INFO +0 -9
  33. data_sitter-0.1.4/data_sitter.egg-info/requires.txt +0 -4
  34. data_sitter-0.1.4/setup.py +0 -20
  35. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/Validation.py +0 -0
  36. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/__init__.py +0 -0
  37. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/field_types/__init__.py +0 -0
  38. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/rules/Parser/RuleParser.py +0 -0
  39. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/rules/Parser/__init__.py +0 -0
  40. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/rules/Parser/parser_utils.py +0 -0
  41. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/rules/Rule.py +0 -0
  42. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/utils/__init__.py +0 -0
  43. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter.egg-info/dependency_links.txt +0 -0
  44. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter.egg-info/entry_points.txt +0 -0
  45. {data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter.egg-info/top_level.txt +0 -0
  46. {data_sitter-0.1.4 → data_sitter-0.1.6}/setup.cfg +0 -0
data_sitter-0.1.6/PKG-INFO (new file)
@@ -0,0 +1,220 @@
+ Metadata-Version: 2.4
+ Name: data-sitter
+ Version: 0.1.6
+ Summary: A Python library that reads data contracts and generates Pydantic models for seamless data validation.
+ Author-email: Lázaro Pereira Candea <lazaro@candea.es>
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ Requires-Dist: python-dotenv==1.0.1
+ Requires-Dist: PyYAML==6.0.2
+ Requires-Dist: parse_type==0.6.4
+ Requires-Dist: pydantic==2.10.5
+ Provides-Extra: dev
+ Requires-Dist: pytest==8.3.5; extra == "dev"
+ Requires-Dist: pytest-cov==6.0.0; extra == "dev"
+ Requires-Dist: pytest-mock==3.14.0; extra == "dev"
+ Requires-Dist: twine==6.1.0; extra == "dev"
+ Requires-Dist: build==1.2.2.post1; extra == "dev"
+
+ # Data-Sitter
+
+ ![Coverage](./coverage.svg)
+
+ ## Overview
+
+ Data-Sitter is a Python library designed to simplify data validation by converting data contracts into Pydantic models. This allows for easy and efficient validation of structured data, ensuring compliance with predefined rules and constraints.
+
+ ## Features
+
+ - Define structured data contracts in JSON format.
+ - Generate Pydantic models automatically from contracts.
+ - Enforce validation rules at the field level.
+ - Support for rule references within the contract.
+
+ ## Installation
+
+ ```sh
+ pip install data-sitter
+ ```
+
+ ## Development and Deployment
+
+ ### CI/CD Pipeline
+
+ The project uses GitHub Actions for continuous integration and deployment:
+
+ 1. **Pull Request Checks**
+    - Automatically checks if the version has been bumped in `pyproject.toml`
+    - Fails if the version is the same as in the main branch
+    - Ensures every PR includes a version update
+
+ 2. **Automatic Releases**
+    - When code is merged to the main branch:
+      - Builds the package
+      - Publishes to PyPI automatically
+    - Uses PyPI API token for secure authentication
+
+ To set up the CI/CD pipeline:
+
+ 1. Create a PyPI API token:
+    - Go to [PyPI Account Settings](https://pypi.org/manage/account/)
+    - Create a new API token with "Upload" scope
+    - Copy the token
+
+ 2. Add the token to GitHub:
+    - Go to your repository's Settings > Secrets and variables > Actions
+    - Create a new secret named `PYPI_API_TOKEN`
+    - Paste your PyPI API token
+
+ ### Setting Up Development Environment
+
+ To set up a development environment with all the necessary tools, install the package with development dependencies:
+
+ ```sh
+ pip install -e ".[dev]"
+ ```
+
+ This will install:
+ - The package in editable mode
+ - Testing tools (pytest, pytest-cov, pytest-mock)
+ - Build tools (build, twine)
+
+ ### Building the Package
+
+ To build the package, run:
+
+ ```sh
+ python -m build
+ ```
+
+ This will create a `dist` directory containing both a source distribution (`.tar.gz`) and a wheel (`.whl`).
+
+ ### Deploying to PyPI
+
+ To upload to PyPI:
+
+ ```sh
+ twine upload dist/*
+ ```
+
+ You'll be prompted for your PyPI username and password. For security, it's recommended to use an API token instead of your password.
+
+ ## Usage
+
+ ### Creating a Pydantic Model from a Contract
+
+ To convert a data contract into a Pydantic model, follow these steps:
+
+ ```python
+ from data_sitter import Contract
+
+ contract_dict = {
+     "name": "test",
+     "fields": [
+         {
+             "name": "FID",
+             "type": "Integer",
+             "rules": ["Positive"]
+         },
+         {
+             "name": "SECCLASS",
+             "type": "String",
+             "rules": [
+                 "Validate Not Null",
+                 "Value In ['UNCLASSIFIED', 'CLASSIFIED']",
+             ]
+         }
+     ],
+ }
+
+ contract = Contract.from_dict(contract_dict)
+ pydantic_contract = contract.pydantic_model
+ ```
+
+ ### Using Rule References
+
+ Data-Sitter allows you to define reusable values in the `values` key and reference them in field rules using `$values.[key]`. For example:
+
+ ```json
+ {
+     "name": "example_contract",
+     "fields": [
+         {
+             "name": "CATEGORY",
+             "type": "String",
+             "rules": ["Value In $values.categories"]
+         },
+         {
+             "name": "NAME",
+             "type": "String",
+             "rules": [
+                 "Length Between $values.min_length and $values.max_length"
+             ]
+         }
+
+     ],
+     "values": {"categories": ["A", "B", "C"], "min_length": 5,"max_length": 50}
+ }
+ ```
+
+ ## Available Rules
+
+ The available validation rules can be retrieved programmatically:
+
+ ```python
+ from data_sitter import RuleRegistry
+
+ rules = RuleRegistry.get_rules_definition()
+ print(rules)
+ ```
+
+ ### Rule Definitions
+
+ Below are the available rules grouped by field type:
+
+ #### Base
+
+ - Is not null
+
+ #### String - (Inherits from `Base`)
+
+ - Is not empty
+ - Starts with {prefix:String}
+ - Ends with {suffix:String}
+ - Is not one of {possible_values:Strings}
+ - Is one of {possible_values:Strings}
+ - Has length between {min_val:Integer} and {max_val:Integer}
+ - Has maximum length {max_len:Integer}
+ - Has minimum length {min_len:Integer}
+ - Is uppercase
+ - Is lowercase
+ - Matches regex {pattern:String}
+ - Is valid email
+ - Is valid URL
+ - Has no digits
+
+ #### Numeric - (Inherits from `Base`)
+
+ - Is not zero
+ - Is positive
+ - Is negative
+ - Is at least {min_val:Number}
+ - Is at most {max_val:Number}
+ - Is greater than {threshold:Number}
+ - Is less than {threshold:Number}
+ - Is not between {min_val:Number} and {max_val:Number}
+ - Is between {min_val:Number} and {max_val:Number}
+
+ #### Integer - (Inherits from `Numeric`)
+
+ #### Float - (Inherits from `Numeric`)
+
+ - Has at most {decimal_places:Integer} decimal places
+
+ ## Contributing
+
+ Contributions are welcome! Feel free to submit issues or pull requests in the [GitHub repository](https://github.com/lcandea/data-sitter).
+
+ ## License
+
+ Data-Sitter is licensed under the MIT License.
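The rule-references feature described in the README above can be exercised end to end. Below is a minimal sketch, not part of the diff, assuming data-sitter 0.1.6 is installed; `Contract.from_dict` and `pydantic_model` appear in the README, and `model_validate`/`model_dump` are standard Pydantic v2 calls.

```python
# Minimal sketch (not part of the diff): load the rule-references example
# from the README above and validate one record. Assumes data-sitter 0.1.6
# is installed; rule spellings follow the README example.
from data_sitter import Contract

contract = Contract.from_dict({
    "name": "example_contract",
    "fields": [
        {"name": "CATEGORY", "type": "String", "rules": ["Value In $values.categories"]},
        {"name": "NAME", "type": "String",
         "rules": ["Length Between $values.min_length and $values.max_length"]},
    ],
    "values": {"categories": ["A", "B", "C"], "min_length": 5, "max_length": 50},
})

Model = contract.pydantic_model                 # dynamically generated BaseModel subclass
record = Model.model_validate({"CATEGORY": "A", "NAME": "Gadget-001"})
print(record.model_dump())                      # {'CATEGORY': 'A', 'NAME': 'Gadget-001'}
```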
{data_sitter-0.1.4 → data_sitter-0.1.6}/README.md
@@ -1,5 +1,7 @@
  # Data-Sitter

+ ![Coverage](./coverage.svg)
+
  ## Overview

  Data-Sitter is a Python library designed to simplify data validation by converting data contracts into Pydantic models. This allows for easy and efficient validation of structured data, ensuring compliance with predefined rules and constraints.
@@ -17,6 +19,68 @@ Data-Sitter is a Python library designed to simplify data validation by converti
  pip install data-sitter
  ```

+ ## Development and Deployment
+
+ ### CI/CD Pipeline
+
+ The project uses GitHub Actions for continuous integration and deployment:
+
+ 1. **Pull Request Checks**
+    - Automatically checks if the version has been bumped in `pyproject.toml`
+    - Fails if the version is the same as in the main branch
+    - Ensures every PR includes a version update
+
+ 2. **Automatic Releases**
+    - When code is merged to the main branch:
+      - Builds the package
+      - Publishes to PyPI automatically
+    - Uses PyPI API token for secure authentication
+
+ To set up the CI/CD pipeline:
+
+ 1. Create a PyPI API token:
+    - Go to [PyPI Account Settings](https://pypi.org/manage/account/)
+    - Create a new API token with "Upload" scope
+    - Copy the token
+
+ 2. Add the token to GitHub:
+    - Go to your repository's Settings > Secrets and variables > Actions
+    - Create a new secret named `PYPI_API_TOKEN`
+    - Paste your PyPI API token
+
+ ### Setting Up Development Environment
+
+ To set up a development environment with all the necessary tools, install the package with development dependencies:
+
+ ```sh
+ pip install -e ".[dev]"
+ ```
+
+ This will install:
+ - The package in editable mode
+ - Testing tools (pytest, pytest-cov, pytest-mock)
+ - Build tools (build, twine)
+
+ ### Building the Package
+
+ To build the package, run:
+
+ ```sh
+ python -m build
+ ```
+
+ This will create a `dist` directory containing both a source distribution (`.tar.gz`) and a wheel (`.whl`).
+
+ ### Deploying to PyPI
+
+ To upload to PyPI:
+
+ ```sh
+ twine upload dist/*
+ ```
+
+ You'll be prompted for your PyPI username and password. For security, it's recommended to use an API token instead of your password.
+
  ## Usage

  ### Creating a Pydantic Model from a Contract
@@ -30,14 +94,14 @@ contract_dict = {
      "name": "test",
      "fields": [
          {
-             "field_name": "FID",
-             "field_type": "IntegerField",
-             "field_rules": ["Positive"]
+             "name": "FID",
+             "type": "Integer",
+             "rules": ["Positive"]
          },
          {
-             "field_name": "SECCLASS",
-             "field_type": "StringField",
-             "field_rules": [
+             "name": "SECCLASS",
+             "type": "String",
+             "rules": [
                  "Validate Not Null",
                  "Value In ['UNCLASSIFIED', 'CLASSIFIED']",
              ]
@@ -58,14 +122,14 @@ Data-Sitter allows you to define reusable values in the `values` key and referen
      "name": "example_contract",
      "fields": [
          {
-             "field_name": "CATEGORY",
-             "field_type": "StringField",
-             "field_rules": ["Value In $values.categories"]
+             "name": "CATEGORY",
+             "type": "String",
+             "rules": ["Value In $values.categories"]
          },
          {
-             "field_name": "NAME",
-             "field_type": "StringField",
-             "field_rules": [
+             "name": "NAME",
+             "type": "String",
+             "rules": [
                  "Length Between $values.min_length and $values.max_length"
              ]
          }
@@ -90,11 +154,11 @@ print(rules)

  Below are the available rules grouped by field type:

- #### BaseField
+ #### Base

  - Is not null

- #### StringField - (Inherits from `BaseField`)
+ #### String - (Inherits from `Base`)

  - Is not empty
  - Starts with {prefix:String}
@@ -111,7 +175,7 @@ Below are the available rules grouped by field type:
  - Is valid URL
  - Has no digits

- #### NumericField - (Inherits from `BaseField`)
+ #### Numeric - (Inherits from `Base`)

  - Is not zero
  - Is positive
@@ -123,9 +187,9 @@ Below are the available rules grouped by field type:
  - Is not between {min_val:Number} and {max_val:Number}
  - Is between {min_val:Number} and {max_val:Number}

- #### IntegerField - (Inherits from `NumericField`)
+ #### Integer - (Inherits from `Numeric`)

- #### FloatField - (Inherits from `NumericField`)
+ #### Float - (Inherits from `Numeric`)

  - Has at most {decimal_places:Integer} decimal places

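The README hunks above document the 0.1.6 field-schema rename: `field_name`/`field_type`/`field_rules` with `...Field`-suffixed type names become `name`/`type`/`rules` with plain type names. A hypothetical migration helper, not part of the package (and assuming Python 3.9+ for `str.removesuffix`), could look like this:

```python
# Hypothetical helper (not part of the package): convert a 0.1.4-style
# contract dict to the 0.1.6 schema shown in the README diff above.
def upgrade_contract(old: dict) -> dict:
    def upgrade_field(field: dict) -> dict:
        return {
            "name": field["field_name"],
            "type": field["field_type"].removesuffix("Field"),  # "IntegerField" -> "Integer"
            "rules": field["field_rules"],
        }

    new = {
        "name": old["name"],
        "fields": [upgrade_field(field) for field in old.get("fields", [])],
    }
    if "values" in old:
        new["values"] = old["values"]
    return new
```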
{data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/Contract.py
@@ -8,7 +8,7 @@ from pydantic import BaseModel
  from .Validation import Validation
  from .field_types import BaseField
  from .FieldResolver import FieldResolver
- from .rules import MatchedRule, RuleRegistry, RuleParser
+ from .rules import ProcessedRule, RuleRegistry, RuleParser


  class ContractWithoutFields(Exception):
@@ -20,9 +20,9 @@ class ContractWithoutName(Exception):


  class Field(NamedTuple):
-     field_name: str
-     field_type: str
-     field_rules: List[str]
+     name: str
+     type: str
+     rules: List[str]


  class Contract:
@@ -37,8 +37,8 @@ class Contract:
          self.fields = fields
          self.rule_parser = RuleParser(values)
          self.field_resolvers = {
-             field_type: FieldResolver(RuleRegistry.get_type(field_type), self.rule_parser)
-             for field_type in list({field.field_type for field in self.fields})  # Unique types
+             _type: FieldResolver(RuleRegistry.get_type(_type), self.rule_parser)
+             for _type in list({field.type for field in self.fields})  # Unique types
          }

      @classmethod
@@ -66,21 +66,18 @@ Contract:
      def field_validators(self) -> Dict[str, BaseField]:
          field_validators = {}
          for field in self.fields:
-             field_resolver = self.field_resolvers[field.field_type]
-             field_validators[field.field_name] = field_resolver.get_field_validator(field.field_name, field.field_rules)
+             field_resolver = self.field_resolvers[field.type]
+             field_validators[field.name] = field_resolver.get_field_validator(field.name, field.rules)
          return field_validators

      @cached_property
-     def rules(self) -> Dict[str, List[MatchedRule]]:
+     def rules(self) -> Dict[str, List[ProcessedRule]]:
          rules = {}
          for field in self.fields:
-             field_resolver = self.field_resolvers[field.field_type]
-             rules[field.field_name] = field_resolver.get_matched_rules(field.field_rules)
+             field_resolver = self.field_resolvers[field.type]
+             rules[field.name] = field_resolver.get_processed_rules(field.rules)
          return rules

-     def model_validate(self, item: dict):
-         return self.pydantic_model.model_validate(item).model_dump()
-
      def validate(self, item: dict) -> Validation:
          return Validation.validate(self.pydantic_model, item)

@@ -88,8 +85,8 @@ Contract:
      def pydantic_model(self) -> BaseModel:
          return type(self.name, (BaseModel,), {
              "__annotations__": {
-                 field_name: field_validator.get_annotation()
-                 for field_name, field_validator in self.field_validators.items()
+                 name: field_validator.get_annotation()
+                 for name, field_validator in self.field_validators.items()
              }
          })

@@ -99,11 +96,11 @@ Contract:
              "name": self.name,
              "fields": [
                  {
-                     "field_name": field_name,
-                     "field_type": field_validator.__class__.__name__,
-                     "field_rules": [rule.parsed_rule for rule in self.rules.get(field_name, [])]
+                     "name": name,
+                     "type": field_validator.type_name.value,
+                     "rules": [rule.parsed_rule for rule in self.rules.get(name, [])]
                  }
-                 for field_name, field_validator in self.field_validators.items()
+                 for name, field_validator in self.field_validators.items()
              ],
              "values": self.rule_parser.values
          }
@@ -119,19 +116,14 @@ Contract:
              "name": self.name,
              "fields": [
                  {
-                     "field_name": field_name,
-                     "field_type": field_validator.__class__.__name__,
-                     "field_rules": [
-                         {
-                             "rule": rule.field_rule,
-                             "parsed_rule": rule.parsed_rule,
-                             "rule_params": rule.rule_params,
-                             "parsed_values": rule.parsed_values,
-                         }
-                         for rule in self.rules.get(field_name, [])
+                     "name": name,
+                     "type": field_validator.type_name.value,
+                     "rules": [
+                         rule.get_front_end_repr()
+                         for rule in self.rules.get(name, [])
                      ]
                  }
-                 for field_name, field_validator in self.field_validators.items()
+                 for name, field_validator in self.field_validators.items()
              ],
              "values": self.rule_parser.values
          }
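Contract.py drops the old `model_validate()` convenience method while keeping `validate()`, which returns a `Validation` object. A hedged sketch of how callers might adapt, using only attributes visible in this diff plus standard Pydantic v2 calls:

```python
# Sketch only: adapting to the removal of Contract.model_validate() in 0.1.6.
# Contract.from_dict / pydantic_model / validate appear in this diff;
# model_validate / model_dump are standard Pydantic v2 methods.
from data_sitter import Contract

contract = Contract.from_dict({
    "name": "test",
    "fields": [{"name": "FID", "type": "Integer", "rules": ["Positive"]}],
})
item = {"FID": 42}

# Equivalent of the removed helper, via the generated Pydantic model:
validated = contract.pydantic_model.model_validate(item).model_dump()

# The API kept in 0.1.6: returns a Validation object instead of a plain dict.
result = contract.validate(item)
```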
data_sitter-0.1.6/data_sitter/FieldResolver.py (new file)
@@ -0,0 +1,62 @@
+ from typing import Dict, List, Type, Union
+
+ from .field_types import BaseField
+ from .rules import Rule, ProcessedRule, LogicalRule, MatchedRule, RuleRegistry, LogicalOperator
+ from .rules.Parser import RuleParser
+
+
+ class RuleNotFoundError(Exception):
+     """No matching rule found for the given parsed rule."""
+
+
+ class MalformedLogicalRuleError(Exception):
+     """Logical rule structure not recognised."""
+
+
+ class FieldResolver:
+     field_class: Type[BaseField]
+     rule_parser: RuleParser
+     rules: List[Rule]
+     _match_rule_cache: Dict[str, MatchedRule]
+
+     def __init__(self, field_class: Type[BaseField], rule_parser: RuleParser) -> None:
+         self.field_class = field_class
+         self.rule_parser = rule_parser
+         self.rules = RuleRegistry.get_rules_for(field_class)
+         self._match_rule_cache = {}
+
+     def get_field_validator(self, name: str, parsed_rules: List[Union[str, dict]]) -> BaseField:
+         field_validator = self.field_class(name)
+         processed_rules = self.get_processed_rules(parsed_rules)
+         validators = [pr.get_validator(field_validator) for pr in processed_rules]
+         field_validator.validators = validators
+         return field_validator
+
+     def get_processed_rules(self, parsed_rules: List[Union[str, dict]]) -> List[ProcessedRule]:
+         processed_rules = []
+         for parsed_rule in parsed_rules:
+             if isinstance(parsed_rule, dict):
+                 if len(keys := tuple(parsed_rule)) != 1 or (operator := keys[0]) not in LogicalOperator:
+                     raise MalformedLogicalRuleError()
+                 if operator == LogicalOperator.NOT and not isinstance(parsed_rule[operator], list):
+                     parsed_rule = {operator: [parsed_rule[operator]]}  # NOT operator can be a single rule
+                 processed_rule = LogicalRule(operator, self.get_processed_rules(parsed_rule[operator]))
+             elif isinstance(parsed_rule, str):
+                 processed_rule = self._match_rule(parsed_rule)
+                 if not processed_rule:
+                     raise RuleNotFoundError(f"Rule not found for parsed rule: '{parsed_rule}'")
+             else:
+                 raise TypeError(f'Parsed Rule type not recognised: {type(parsed_rule)}')
+             processed_rules.append(processed_rule)
+         return processed_rules
+
+     def _match_rule(self, parsed_rule: str) -> MatchedRule:
+         if parsed_rule in self._match_rule_cache:
+             return self._match_rule_cache[parsed_rule]
+
+         for rule in self.rules:
+             matched_rule = self.rule_parser.match(rule, parsed_rule)
+             if matched_rule:
+                 self._match_rule_cache[parsed_rule] = matched_rule
+                 return matched_rule
+         return None
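`get_processed_rules` dispatches on the shape of each parsed rule: strings are matched against the rules registered for the field type, single-key dicts become logical groups, and a bare `NOT` operand is wrapped into a one-element list before recursing. A simplified, standalone illustration of that dispatch follows; toy tuples stand in for the package's `MatchedRule`/`LogicalRule` classes, and `"Not"` is an assumed operator spelling, since `Enums.py` is not shown in this diff.

```python
# Simplified, standalone illustration of the dispatch in
# FieldResolver.get_processed_rules (toy types, assumed "Not" spelling).
def process(parsed_rules: list) -> list:
    out = []
    for parsed_rule in parsed_rules:
        if isinstance(parsed_rule, dict):
            (operator, operand), = parsed_rule.items()    # must be a single-key dict
            if operator == "Not" and not isinstance(operand, list):
                operand = [operand]                       # NOT may wrap a single rule
            out.append((operator, process(operand)))      # recurse into the group
        elif isinstance(parsed_rule, str):
            out.append(("rule", parsed_rule))             # stand-in for rule matching
        else:
            raise TypeError(f"Parsed rule type not recognised: {type(parsed_rule)}")
    return out


print(process(["Is positive", {"Not": "Is between 10 and 20"}]))
# [('rule', 'Is positive'), ('Not', [('rule', 'Is between 10 and 20')])]
```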
{data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/cli.py
@@ -44,5 +44,5 @@ def main():
      print(f"The file {args.file} pass the contract {args.contract}")


- if __name__ == '__main__':
+ if __name__ == '__main__': # pragma: no cover
      main()
{data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/field_types/BaseField.py
@@ -1,18 +1,24 @@
  from abc import ABC
- from typing import Annotated, List, Optional, Type
+ from typing import Annotated, Callable, List, Optional, Type

  from pydantic import AfterValidator
+
+ from .FieldTypes import FieldTypes
  from ..rules import register_rule, register_field


- def aggregated_validator(validators: List[callable], is_optional: bool):
-     def _validator(value):
+ class NotInitialisedError(Exception):
+     """The field instance is initialised without validators"""
+
+
+ def aggregated_validator(validators: List[Callable], is_optional: bool):
+     def validator(value):
          if is_optional and value is None:
              return value
          for validator_func in validators:
              validator_func(value)
          return value
-     return _validator
+     return validator

  @register_field
  class BaseField(ABC):
@@ -20,39 +26,42 @@ class BaseField(ABC):
      is_optional: bool
      validators = None
      field_type = None
+     type_name = FieldTypes.BASE

      def __init__(self, name: str) -> None:
          self.name = name
          self.is_optional = True
-         self.validators = []
+         self.validators = None

      @register_rule("Is not null")
      def validator_not_null(self):
-         def _validator(value):
-             if self.is_optional:
-                 return value
+         def validator(value):
              if value is None:
                  raise ValueError("Value cannot be null.")
              return value

          self.is_optional = False
-         self.validators.append(_validator)
+         return validator

      def validate(self, value):
+         if self.validators is None:
+             raise NotInitialisedError()
          for validator in self.validators:
              validator(value)

      def get_annotation(self):
+         if self.validators is None:
+             raise NotInitialisedError()
          field_type = Optional[self.field_type] if self.is_optional else self.field_type
          return Annotated[field_type, AfterValidator(aggregated_validator(self.validators, self.is_optional))]

      @classmethod
      def get_parents(cls: Type["BaseField"]) -> List[Type["BaseField"]]:
-         if cls.__name__ == "BaseField":
+         if cls == BaseField:
              return []
-         ancestors = []
+         ancestors = set()
          for base in cls.__bases__:
-             if base.__name__.endswith("Field"):
-                 ancestors.append(base)
-                 ancestors.extend(base.get_parents())  # It wont break because we have a base case
-         return ancestors
+             if issubclass(base, BaseField):
+                 ancestors.add(base)
+                 ancestors.update(base.get_parents())
+         return list(ancestors)
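The rewritten `get_parents` stops matching class names by suffix and instead walks `__bases__` with `issubclass`, collecting ancestors into a set so shared bases appear only once. A standalone sketch of the same approach with toy classes (not the package's field hierarchy):

```python
# Standalone sketch of the new get_parents strategy: walk __bases__, keep
# only subclasses of the root, and collect into a set to deduplicate.
from typing import List, Type


class Base:
    @classmethod
    def get_parents(cls) -> List[Type["Base"]]:
        if cls is Base:
            return []
        ancestors = set()
        for parent in cls.__bases__:
            if issubclass(parent, Base):
                ancestors.add(parent)
                ancestors.update(parent.get_parents())
        return list(ancestors)


class Numeric(Base): ...
class Integer(Numeric): ...


print(Integer.get_parents())   # [Numeric, Base] as class objects; set order not guaranteed
```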
data_sitter-0.1.6/data_sitter/field_types/FieldTypes.py (new file)
@@ -0,0 +1,9 @@
+ from enum import StrEnum
+
+
+ class FieldTypes(StrEnum):
+     BASE = "Base"
+     INT = "Integer"
+     FLOAT = "Float"
+     STRING = "String"
+     NUMERIC = "Numeric"
data_sitter-0.1.6/data_sitter/field_types/FloatField.py (new file)
@@ -0,0 +1,26 @@
+ from .FieldTypes import FieldTypes
+ from .NumericField import NumericField
+ from ..rules import register_field, register_rule
+ from decimal import Decimal
+
+
+ @register_field
+ class FloatField(NumericField):
+     field_type = float
+     type_name = FieldTypes.FLOAT
+
+
+     @register_rule("Has at most {decimal_places:Integer} decimal places")
+     def validate_max_decimal_places(self, decimal_places: int):
+         def validator(value):
+             decimal_str = str(Decimal(str(value)).normalize())
+             # If no decimal point or only zeros after decimal, it has 0 decimal places
+             if '.' not in decimal_str:
+                 decimal_places_count = 0
+             else:
+                 decimal_places_count = len(decimal_str.split('.')[1])
+
+             if decimal_places_count > decimal_places:
+                 raise ValueError(f"Value must have at most {decimal_places} decimal places.")
+             return value
+         return validator
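The new Float rule counts fractional digits by round-tripping the value through `Decimal(...).normalize()`, so trailing zeros do not count against the limit. A standalone sketch of just that counting step:

```python
# Standalone sketch of the decimal-place counting behind the new
# "Has at most {decimal_places} decimal places" rule.
from decimal import Decimal


def decimal_places(value: float) -> int:
    text = str(Decimal(str(value)).normalize())
    return len(text.split(".")[1]) if "." in text else 0


print(decimal_places(3.14))   # 2
print(decimal_places(2.50))   # 1 (trailing zero dropped)
print(decimal_places(10.0))   # 0 (normalize() yields '1E+1', no '.' at all)
```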
{data_sitter-0.1.4 → data_sitter-0.1.6}/data_sitter/field_types/IntegerField.py
@@ -1,3 +1,4 @@
+ from .FieldTypes import FieldTypes
  from .NumericField import NumericField
  from ..rules import register_field

@@ -5,3 +6,4 @@ from ..rules import register_field
  @register_field
  class IntegerField(NumericField):
      field_type = int
+     type_name = FieldTypes.INT