amati 0.2.20__tar.gz → 0.2.22__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96) hide show
  1. {amati-0.2.20 → amati-0.2.22}/.github/workflows/checks.yaml +2 -0
  2. {amati-0.2.20 → amati-0.2.22}/.github/workflows/scorecards.yml +4 -0
  3. {amati-0.2.20 → amati-0.2.22}/.gitignore +4 -1
  4. {amati-0.2.20 → amati-0.2.22}/.pre-commit-config.yaml +5 -4
  5. {amati-0.2.20 → amati-0.2.22}/Dockerfile +2 -1
  6. {amati-0.2.20 → amati-0.2.22}/PKG-INFO +2 -1
  7. {amati-0.2.20 → amati-0.2.22}/README.md +1 -0
  8. {amati-0.2.20 → amati-0.2.22}/amati/amati.py +9 -5
  9. amati-0.2.22/amati/fields/email.py +40 -0
  10. {amati-0.2.20 → amati-0.2.22}/amati/fields/iso9110.py +1 -1
  11. {amati-0.2.20 → amati-0.2.22}/amati/fields/spdx_licences.py +1 -1
  12. {amati-0.2.20 → amati-0.2.22}/amati/fields/uri.py +15 -2
  13. {amati-0.2.20 → amati-0.2.22}/amati/file_handler.py +115 -8
  14. {amati-0.2.20 → amati-0.2.22}/amati/model_validators.py +3 -3
  15. {amati-0.2.20 → amati-0.2.22}/amati/validators/generic.py +33 -7
  16. {amati-0.2.20 → amati-0.2.22}/amati/validators/oas304.py +7 -5
  17. {amati-0.2.20 → amati-0.2.22}/amati/validators/oas311.py +4 -2
  18. {amati-0.2.20 → amati-0.2.22}/bin/checks.sh +4 -2
  19. amati-0.2.22/bin/startup.sh +5 -0
  20. {amati-0.2.20 → amati-0.2.22}/bin/uv-upgrade-from-main.sh +3 -2
  21. {amati-0.2.20 → amati-0.2.22}/pyproject.toml +2 -1
  22. {amati-0.2.20 → amati-0.2.22}/tests/data/DigitalOcean-public.v2.errors.json +1 -1
  23. {amati-0.2.20 → amati-0.2.22}/tests/fields/test_uri.py +2 -2
  24. {amati-0.2.20 → amati-0.2.22}/tests/model_validators/test_all_of.py +2 -2
  25. {amati-0.2.20 → amati-0.2.22}/tests/model_validators/test_at_least_one.py +2 -2
  26. amati-0.2.22/tests/validators/__init__.py +0 -0
  27. {amati-0.2.20 → amati-0.2.22}/uv.lock +84 -52
  28. amati-0.2.20/amati/fields/email.py +0 -21
  29. amati-0.2.20/bin/startup.sh +0 -4
  30. {amati-0.2.20 → amati-0.2.22}/.dockerignore +0 -0
  31. {amati-0.2.20 → amati-0.2.22}/.github/dependabot.yml +0 -0
  32. {amati-0.2.20 → amati-0.2.22}/.github/workflows/codeql.yml +0 -0
  33. {amati-0.2.20 → amati-0.2.22}/.github/workflows/coverage.yaml +0 -0
  34. {amati-0.2.20 → amati-0.2.22}/.github/workflows/data-refresh.yaml +0 -0
  35. {amati-0.2.20 → amati-0.2.22}/.github/workflows/dependency-review.yml +0 -0
  36. {amati-0.2.20 → amati-0.2.22}/.github/workflows/publish.yaml +0 -0
  37. {amati-0.2.20 → amati-0.2.22}/.github/workflows/tag-and-create-release.yaml +0 -0
  38. {amati-0.2.20 → amati-0.2.22}/.python-version +0 -0
  39. {amati-0.2.20 → amati-0.2.22}/LICENSE +0 -0
  40. {amati-0.2.20 → amati-0.2.22}/SECURITY.md +0 -0
  41. {amati-0.2.20 → amati-0.2.22}/TEMPLATE.html +0 -0
  42. {amati-0.2.20 → amati-0.2.22}/amati/__init__.py +0 -0
  43. {amati-0.2.20 → amati-0.2.22}/amati/_data/files/http-status-codes.json +0 -0
  44. {amati-0.2.20 → amati-0.2.22}/amati/_data/files/iso9110.json +0 -0
  45. {amati-0.2.20 → amati-0.2.22}/amati/_data/files/media-types.json +0 -0
  46. {amati-0.2.20 → amati-0.2.22}/amati/_data/files/schemes.json +0 -0
  47. {amati-0.2.20 → amati-0.2.22}/amati/_data/files/spdx-licences.json +0 -0
  48. {amati-0.2.20 → amati-0.2.22}/amati/_data/files/tlds.json +0 -0
  49. {amati-0.2.20 → amati-0.2.22}/amati/_data/http_status_code.py +0 -0
  50. {amati-0.2.20 → amati-0.2.22}/amati/_data/iso9110.py +0 -0
  51. {amati-0.2.20 → amati-0.2.22}/amati/_data/media_types.py +0 -0
  52. {amati-0.2.20 → amati-0.2.22}/amati/_data/refresh.py +0 -0
  53. {amati-0.2.20 → amati-0.2.22}/amati/_data/schemes.py +0 -0
  54. {amati-0.2.20 → amati-0.2.22}/amati/_data/spdx_licences.py +0 -0
  55. {amati-0.2.20 → amati-0.2.22}/amati/_data/tlds.py +0 -0
  56. {amati-0.2.20 → amati-0.2.22}/amati/_error_handler.py +0 -0
  57. {amati-0.2.20 → amati-0.2.22}/amati/_logging.py +0 -0
  58. {amati-0.2.20 → amati-0.2.22}/amati/_resolve_forward_references.py +0 -0
  59. {amati-0.2.20 → amati-0.2.22}/amati/exceptions.py +0 -0
  60. {amati-0.2.20 → amati-0.2.22}/amati/fields/__init__.py +0 -0
  61. {amati-0.2.20 → amati-0.2.22}/amati/fields/_custom_types.py +0 -0
  62. {amati-0.2.20 → amati-0.2.22}/amati/fields/commonmark.py +0 -0
  63. {amati-0.2.20 → amati-0.2.22}/amati/fields/http_status_codes.py +0 -0
  64. {amati-0.2.20 → amati-0.2.22}/amati/fields/json.py +0 -0
  65. {amati-0.2.20 → amati-0.2.22}/amati/fields/media.py +0 -0
  66. {amati-0.2.20 → amati-0.2.22}/amati/fields/oas.py +0 -0
  67. {amati-0.2.20 → amati-0.2.22}/amati/grammars/oas.py +0 -0
  68. {amati-0.2.20 → amati-0.2.22}/amati/grammars/rfc6901.py +0 -0
  69. {amati-0.2.20 → amati-0.2.22}/amati/grammars/rfc7159.py +0 -0
  70. /amati-0.2.20/amati/validators/__init__.py → /amati-0.2.22/amati/py.typed +0 -0
  71. {amati-0.2.20/tests → amati-0.2.22/amati/validators}/__init__.py +0 -0
  72. {amati-0.2.20 → amati-0.2.22}/scripts/setup_test_specs.py +0 -0
  73. {amati-0.2.20/tests/fields → amati-0.2.22/tests}/__init__.py +0 -0
  74. {amati-0.2.20 → amati-0.2.22}/tests/data/.amati.tests.yaml +0 -0
  75. {amati-0.2.20 → amati-0.2.22}/tests/data/api.github.com.yaml.errors.json +0 -0
  76. {amati-0.2.20 → amati-0.2.22}/tests/data/next-api.github.com.yaml.errors.json +0 -0
  77. {amati-0.2.20 → amati-0.2.22}/tests/data/openapi.yaml +0 -0
  78. {amati-0.2.20 → amati-0.2.22}/tests/data/openapi.yaml.gz +0 -0
  79. {amati-0.2.20 → amati-0.2.22}/tests/data/redocly.openapi.yaml.errors.json +0 -0
  80. {amati-0.2.20/tests/validators → amati-0.2.22/tests/fields}/__init__.py +0 -0
  81. {amati-0.2.20 → amati-0.2.22}/tests/fields/test_email.py +0 -0
  82. {amati-0.2.20 → amati-0.2.22}/tests/fields/test_http_status_codes.py +0 -0
  83. {amati-0.2.20 → amati-0.2.22}/tests/fields/test_iso9110.py +0 -0
  84. {amati-0.2.20 → amati-0.2.22}/tests/fields/test_media.py +0 -0
  85. {amati-0.2.20 → amati-0.2.22}/tests/fields/test_oas.py +0 -0
  86. {amati-0.2.20 → amati-0.2.22}/tests/fields/test_spdx_licences.py +0 -0
  87. {amati-0.2.20 → amati-0.2.22}/tests/helpers.py +0 -0
  88. {amati-0.2.20 → amati-0.2.22}/tests/model_validators/test_if_then.py +0 -0
  89. {amati-0.2.20 → amati-0.2.22}/tests/model_validators/test_only_one.py +0 -0
  90. {amati-0.2.20 → amati-0.2.22}/tests/test_amati.py +0 -0
  91. {amati-0.2.20 → amati-0.2.22}/tests/test_external_specs.py +0 -0
  92. {amati-0.2.20 → amati-0.2.22}/tests/test_logging.py +0 -0
  93. {amati-0.2.20 → amati-0.2.22}/tests/validators/test_generic.py +0 -0
  94. {amati-0.2.20 → amati-0.2.22}/tests/validators/test_licence_object.py +0 -0
  95. {amati-0.2.20 → amati-0.2.22}/tests/validators/test_security_scheme_object.py +0 -0
  96. {amati-0.2.20 → amati-0.2.22}/tests/validators/test_server_variable_object.py +0 -0
@@ -13,6 +13,7 @@ jobs:
13
13
  permissions:
14
14
  pull-requests: write
15
15
  contents: write
16
+ actions: read
16
17
  steps:
17
18
  - name: Harden the runner (Audit all outbound calls)
18
19
  uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
@@ -85,6 +86,7 @@ jobs:
85
86
  uses: py-cov-action/python-coverage-comment-action@d3db80fe08c357443fa62f4ce5ce4c753d61eba8 # v3
86
87
  with:
87
88
  GITHUB_TOKEN: ${{ secrets.BOT_COMMENT_TOKEN }}
89
+ continue-on-error: true
88
90
 
89
91
  - name: Store Pull Request comment to be posted
90
92
  if: steps.check_changes.outputs.relevant == 'true'
@@ -13,6 +13,10 @@ on:
13
13
  - cron: '20 7 * * 2'
14
14
  push:
15
15
  branches: ["main"]
16
+ paths-ignore:
17
+ - 'pyproject.toml'
18
+ - 'uv.lock'
19
+
16
20
 
17
21
  # Declare default permissions as read only.
18
22
  permissions: read-all
@@ -136,4 +136,7 @@ cython_debug/
136
136
  .amati/*
137
137
 
138
138
  # ruff
139
- .ruff_cache/
139
+ .ruff_cache/
140
+
141
+ # sonar
142
+ .sonar/
@@ -1,7 +1,7 @@
1
1
  repos:
2
2
  - repo: https://github.com/astral-sh/ruff-pre-commit
3
3
  # Ruff version.
4
- rev: v0.13.2
4
+ rev: v0.14.0
5
5
  hooks:
6
6
  # Run the linter.
7
7
  - id: ruff-check
@@ -9,10 +9,11 @@ repos:
9
9
  # Run the formatter.
10
10
  - id: ruff-format
11
11
  - repo: https://github.com/gitleaks/gitleaks
12
- rev: v8.16.3
12
+ rev: v8.28.0
13
13
  hooks:
14
14
  - id: gitleaks
15
- - repo: https://github.com/jumanjihouse/pre-commit-hooks
16
- rev: 3.0.0
15
+ - repo: https://github.com/koalaman/shellcheck-precommit
16
+ rev: v0.7.2
17
17
  hooks:
18
18
  - id: shellcheck
19
+ args: [-x]
@@ -10,7 +10,8 @@ COPY amati/ amati/
10
10
 
11
11
  RUN uv lock \
12
12
  && uv sync --locked --no-dev \
13
- && adduser --disabled-password --gecos '' appuser && chown -R appuser /app
13
+ && adduser --disabled-password --gecos '' appuser \
14
+ && chown -R appuser /app
14
15
 
15
16
  USER appuser
16
17
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: amati
3
- Version: 0.2.20
3
+ Version: 0.2.22
4
4
  Summary: Validates that a .yaml or .json file conforms to the OpenAPI Specifications 3.x.
5
5
  Project-URL: Homepage, https://github.com/ben-alexander/amati
6
6
  Project-URL: Issues, https://github.com/ben-alexander/amati/issues
@@ -118,6 +118,7 @@ This project uses:
118
118
  * [Ruff](https://docs.astral.sh/ruff/) as a linter and formatter
119
119
  * [Hypothesis](https://hypothesis.readthedocs.io/en/latest/index.html) for test data generation
120
120
  * [Coverage](https://coverage.readthedocs.io/en/7.6.8/) on both the tests and code for test coverage
121
+ * [Shellcheck](https://github.com/koalaman/shellcheck/wiki) as a SAST tool for shell scripts
121
122
 
122
123
  It's expected that there are no errors and 100% of the code is reached and executed. The strategy for test coverage is based on parsing test specifications and not unit tests.
123
124
  amati runs tests on the external specifications, detailed in `tests/data/.amati.tests.yaml`. To be able to run these tests the GitHub repos containing the specifications need to be available locally. Specific revisions of the repos can be downloaded by running the following, which will clone the repos into `.amati/amati-tests-specs/<repo-name>`.
@@ -91,6 +91,7 @@ This project uses:
91
91
  * [Ruff](https://docs.astral.sh/ruff/) as a linter and formatter
92
92
  * [Hypothesis](https://hypothesis.readthedocs.io/en/latest/index.html) for test data generation
93
93
  * [Coverage](https://coverage.readthedocs.io/en/7.6.8/) on both the tests and code for test coverage
94
+ * [Shellcheck](https://github.com/koalaman/shellcheck/wiki) as a SAST tool for shell scripts
94
95
 
95
96
  It's expected that there are no errors and 100% of the code is reached and executed. The strategy for test coverage is based on parsing test specifications and not unit tests.
96
97
  amati runs tests on the external specifications, detailed in `tests/data/.amati.tests.yaml`. To be able to run these tests the GitHub repos containing the specifications need to be available locally. Specific revisions of the repos can be downloaded by running the following, which will clone the repos into `.amati/amati-tests-specs/<repo-name>`.
@@ -92,7 +92,7 @@ def run(
92
92
  consistency_check: bool = False,
93
93
  local: bool = False,
94
94
  html_report: bool = False,
95
- ):
95
+ ) -> bool:
96
96
  """
97
97
  Runs the full amati process on a specific specification file.
98
98
 
@@ -220,7 +220,7 @@ def discover(spec: str, discover_dir: str = ".") -> list[Path]:
220
220
  if __name__ == "__main__":
221
221
  import argparse
222
222
 
223
- parser = argparse.ArgumentParser(
223
+ parser: argparse.ArgumentParser = argparse.ArgumentParser(
224
224
  prog="amati",
225
225
  description="""
226
226
  Tests whether an OpenAPI specification is valid. Will look for an openapi.json
@@ -282,7 +282,7 @@ if __name__ == "__main__":
282
282
  "or , media types from IANA",
283
283
  )
284
284
 
285
- args = parser.parse_args()
285
+ args: argparse.Namespace = parser.parse_args()
286
286
 
287
287
  logger.remove() # Remove the default logger
288
288
  # Add a new logger that outputs to stderr with a specific format
@@ -301,15 +301,19 @@ if __name__ == "__main__":
301
301
  sys.exit(1)
302
302
 
303
303
  try:
304
- specifications = discover(args.spec, args.discover)
304
+ specifications: list[Path] = discover(args.spec, args.discover)
305
305
  except Exception as e:
306
306
  logger.error(str(e))
307
307
  sys.exit(1)
308
308
 
309
+ specification: Path
309
310
  for specification in specifications:
310
- successful_check = False
311
+ successful_check: bool = False
311
312
  logger.info(f"Processing specification {specification}")
312
313
 
314
+ # Top-level try/except to ensure one failed spec doesn't stop the rest
315
+ # from being processed.
316
+ e: Exception
313
317
  try:
314
318
  successful_check = run(
315
319
  specification, args.consistency_check, args.local, args.html_report
@@ -0,0 +1,40 @@
1
+ """
2
+ Validates an email according to the RFC 5322 ABNF grammar, §3.
3
+ """
4
+
5
+ from abnf import ParseError
6
+ from abnf.grammars import rfc5322
7
+
8
+ from amati import AmatiValueError
9
+ from amati.fields import Str as _Str
10
+
11
+ reference_uri = "https://www.rfc-editor.org/rfc/rfc5322#section-3"
12
+
13
+
14
+ class Email(_Str):
15
+ """A string subclass representing a validated RFC 5322 email address.
16
+
17
+ This class ensures that email addresses conform to the RFC 5322 specification
18
+ by validating the input during initialization. Invalid addresses raise an
19
+ AmatiValueError.
20
+
21
+ Args:
22
+ value: The email address string to validate.
23
+
24
+ Raises:
25
+ AmatiValueError: If the value is not a valid RFC 5322 email address.
26
+
27
+ Example:
28
+ >>> email = Email("user@example.com")
29
+ >>> invalid = Email("not-an-email")
30
+ Traceback (most recent call last):
31
+ amati.exceptions.AmatiValueError: message
32
+ """
33
+
34
+ def __init__(self, value: str):
35
+ try:
36
+ rfc5322.Rule("address").parse_all(value)
37
+ except ParseError as e:
38
+ raise AmatiValueError(
39
+ f"{value} is not a valid email address", reference_uri
40
+ ) from e
@@ -16,7 +16,7 @@ reference_uri = (
16
16
  )
17
17
 
18
18
 
19
- data = cast(list[dict[str, str]], get("iso9110"))
19
+ data: list[dict[str, str]] = cast(list[dict[str, str]], get("iso9110"))
20
20
 
21
21
 
22
22
  HTTP_AUTHENTICATION_SCHEMES: set[str] = {
@@ -11,7 +11,7 @@ from amati.fields.uri import URI
11
11
 
12
12
  reference_uri = "https://spdx.org/licenses/"
13
13
 
14
- data = cast(list[dict[str, Any]], get("spdx_licences"))
14
+ data: list[dict[str, Any]] = cast(list[dict[str, Any]], get("spdx_licences"))
15
15
 
16
16
  # `seeAlso` is the list of URLs associated with each licence
17
17
  VALID_LICENCES: dict[str, list[str]] = {
@@ -65,9 +65,22 @@ class Scheme(_Str):
65
65
 
66
66
 
67
67
  class URIType(str, Enum):
68
+ """Enumeration of URI reference types.
69
+
70
+ Categorizes different types of URI references as defined in RFC 3986,
71
+ along with JSON Pointer references from RFC 6901.
72
+
73
+ Attributes:
74
+ ABSOLUTE: A URI with a scheme component (e.g., "https://example.com/path").
75
+ RELATIVE: A relative reference without a scheme (e.g., "../path/file.json").
76
+ NETWORK_PATH: A network path reference starting with "//"
77
+ (e.g., "//example.com").
78
+ JSON_POINTER: A JSON Pointer as defined in RFC 6901 (e.g., "#/foo/bar/0").
79
+ """
80
+
68
81
  ABSOLUTE = "absolute"
69
82
  RELATIVE = "relative"
70
- NON_RELATIVE = "non-relative"
83
+ NETWORK_PATH = "network-path"
71
84
  JSON_POINTER = "JSON pointer"
72
85
 
73
86
 
@@ -152,7 +165,7 @@ class URI(_Str):
152
165
  if self.scheme:
153
166
  return URIType.ABSOLUTE
154
167
  if self.authority:
155
- return URIType.NON_RELATIVE
168
+ return URIType.NETWORK_PATH
156
169
  if self.path:
157
170
  if str(self).startswith("#"):
158
171
  return URIType.JSON_POINTER
@@ -33,16 +33,37 @@ type JSONValue = JSONPrimitive | JSONArray | JSONObject
33
33
 
34
34
 
35
35
  class FileLoader(ABC):
36
- """Abstract base class for file loaders."""
36
+ """Abstract base class for file loaders.
37
+
38
+ Defines the interface for loading and parsing files of different formats.
39
+ Implementations should provide format-specific handling logic.
40
+ """
37
41
 
38
42
  @abstractmethod
39
43
  def can_handle(self, file_path: Path) -> bool:
40
- """Check if this loader can handle the given file."""
44
+ """Check if this loader can handle the given file.
45
+
46
+ Args:
47
+ file_path: Path to the file to check.
48
+
49
+ Returns:
50
+ True if this loader can handle the file, False otherwise.
51
+ """
41
52
  pass
42
53
 
43
54
  @abstractmethod
44
55
  def load(self, content: str) -> JSONObject:
45
- """Load and parse the file content."""
56
+ """Load and parse the file content.
57
+
58
+ Args:
59
+ content: The raw file content as a string.
60
+
61
+ Returns:
62
+ The parsed content as a JSONObject.
63
+
64
+ Raises:
65
+ May raise implementation-specific exceptions for parsing errors.
66
+ """
46
67
  pass
47
68
 
48
69
 
@@ -50,9 +71,28 @@ class JSONLoader(FileLoader):
50
71
  """Loader for JSON files."""
51
72
 
52
73
  def can_handle(self, file_path: Path) -> bool:
74
+ """Check if this loader can handle the given file.
75
+
76
+ Args:
77
+ file_path: Path to the file to check.
78
+
79
+ Returns:
80
+ True if this loader can handle the file, False otherwise.
81
+ """
53
82
  return file_path.suffix.lower() in {".json", ".js"}
54
83
 
55
84
  def load(self, content: str) -> JSONObject:
85
+ """Load and parse the file content.
86
+
87
+ Args:
88
+ content: The raw file content as a string.
89
+
90
+ Returns:
91
+ The parsed content as a JSONObject.
92
+
93
+ Raises:
94
+ May raise implementation-specific exceptions for parsing errors.
95
+ """
56
96
  return json.loads(content)
57
97
 
58
98
 
@@ -60,21 +100,56 @@ class YAMLLoader(FileLoader):
60
100
  """Loader for YAML files."""
61
101
 
62
102
  def can_handle(self, file_path: Path) -> bool:
103
+ """Check if this loader can handle the given file.
104
+
105
+ Args:
106
+ file_path: Path to the file to check.
107
+
108
+ Returns:
109
+ True if this loader can handle the file, False otherwise.
110
+ """
63
111
  return file_path.suffix.lower() in {".yaml", ".yml"}
64
112
 
65
113
  def load(self, content: str) -> JSONObject:
114
+ """Load and parse the file content.
115
+
116
+ Args:
117
+ content: The raw file content as a string.
118
+
119
+ Returns:
120
+ The parsed content as a JSONObject.
121
+
122
+ Raises:
123
+ May raise implementation-specific exceptions for parsing errors.
124
+ """
66
125
  return yaml.safe_load(content)
67
126
 
68
127
 
69
128
  class FileProcessor:
70
- """Main processor for handling gzipped and regular files."""
129
+ """Main processor for handling gzipped and regular files.
130
+
131
+ Processes files in various formats (JSON, YAML) with optional gzip compression.
132
+ Automatically detects compression and selects the appropriate loader based on
133
+ file extension.
134
+
135
+ Attributes:
136
+ loaders: List of available file loaders for different formats.
137
+ """
71
138
 
72
139
  def __init__(self) -> None:
140
+ """Initialize the FileProcessor with default loaders."""
73
141
  self.loaders: list[FileLoader] = [JSONLoader(), YAMLLoader()]
74
142
 
75
143
  @staticmethod
76
144
  def _is_gzip_file(file_path: Path) -> bool:
77
- """Check if file is gzipped by reading magic bytes."""
145
+ """Check if file is gzipped by reading magic bytes.
146
+
147
+ Args:
148
+ file_path: Path to the file to check.
149
+
150
+ Returns:
151
+ True if the file is gzip-compressed, False otherwise.
152
+ """
78
153
  try:
79
154
  with open(file_path, "rb") as f:
80
155
  magic = f.read(2)
@@ -84,13 +159,32 @@ class FileProcessor:
84
159
 
85
160
  @staticmethod
86
161
  def _get_decompressed_path(file_path: Path) -> Path:
87
- """Get the path without .gz extension for determining file type."""
162
+ """Get the path without .gz extension for determining file type.
163
+
164
+ Args:
165
+ file_path: Path to the potentially compressed file.
166
+
167
+ Returns:
168
+ The file path with .gz extension removed if present, otherwise
169
+ the original path unchanged.
170
+ """
88
171
  if file_path.suffix.lower() == ".gz":
89
172
  return file_path.with_suffix("")
90
173
  return file_path
91
174
 
92
175
  def _read_file_content(self, file_path: Path) -> str:
93
- """Read file content, decompressing if necessary."""
176
+ """Read file content, decompressing if necessary.
177
+
178
+ Args:
179
+ file_path: Path to the file to read.
180
+
181
+ Returns:
182
+ The file content as a UTF-8 encoded string.
183
+
184
+ Raises:
185
+ OSError: If the file cannot be read.
186
+ gzip.BadGzipFile: If the file appears to be gzipped but is corrupted.
187
+ """
94
188
  if self._is_gzip_file(file_path):
95
189
  with gzip.open(file_path, "rt", encoding="utf-8") as f:
96
190
  return f.read()
@@ -99,7 +193,20 @@ class FileProcessor:
99
193
  return f.read()
100
194
 
101
195
  def _get_appropriate_loader(self, file_path: Path) -> FileLoader:
102
- """Get the appropriate loader for the file type."""
196
+ """Get the appropriate loader for the file type.
197
+
198
+ Determines the correct loader based on the file extension, ignoring
199
+ any .gz compression extension.
200
+
201
+ Args:
202
+ file_path: Path to the file needing a loader.
203
+
204
+ Returns:
205
+ The appropriate FileLoader instance for the file type.
206
+
207
+ Raises:
208
+ ValueError: If no suitable loader is found for the file type.
209
+ """
103
210
  # Use the decompressed path to determine file type
104
211
  target_path = self._get_decompressed_path(file_path)
105
212
 
@@ -21,15 +21,15 @@ class UnknownValue:
21
21
 
22
22
  _instance = None
23
23
 
24
- def __new__(cls):
24
+ def __new__(cls) -> "UnknownValue":
25
25
  if cls._instance is None:
26
26
  cls._instance = super().__new__(cls)
27
27
  return cls._instance
28
28
 
29
- def __repr__(self): # pragma: no cover
29
+ def __repr__(self) -> str: # pragma: no cover
30
30
  return "UNKNOWN"
31
31
 
32
- def __str__(self): # pragma: no cover
32
+ def __str__(self) -> str: # pragma: no cover
33
33
  return "UNKNOWN"
34
34
 
35
35
 
@@ -20,15 +20,30 @@ from amati._logging import Logger
20
20
 
21
21
 
22
22
  class GenericObject(BaseModel):
23
- """
24
- A generic model to overwrite provide extra functionality
25
- to pydantic.BaseModel.
23
+ """A generic model extending Pydantic BaseModel with enhanced validation.
24
+
25
+ Provides additional functionality for handling extra fields, including pattern
26
+ matching validation and detailed logging of invalid fields. This class validates
27
+ extra fields against optional regex patterns and logs violations without raising
28
+ exceptions.
29
+
30
+ Attributes:
31
+ _reference_uri: URI reference for error reporting and documentation.
32
+ _extra_field_pattern: Optional regex pattern to validate extra field names.
26
33
  """
27
34
 
28
35
  _reference_uri: ClassVar[str] = PrivateAttr()
29
36
  _extra_field_pattern: re.Pattern[str] | None = PrivateAttr()
30
37
 
31
38
  def __init__(self, **data: Any) -> None:
39
+ """Initialize the model and validate extra fields.
40
+
41
+ Logs any fields that are not recognized as valid model fields or aliases
42
+ when extra fields are not allowed by the model configuration.
43
+
44
+ Args:
45
+ **data: Arbitrary keyword arguments representing model data.
46
+ """
32
47
  super().__init__(**data)
33
48
 
34
49
  if self.model_config.get("extra") == "allow":
@@ -53,6 +68,15 @@ class GenericObject(BaseModel):
53
68
  )
54
69
 
55
70
  def model_post_init(self, __context: Any) -> None:
71
+ """Validate extra fields against the configured pattern after initialization.
72
+
73
+ If an extra field pattern is configured, checks all extra fields against
74
+ the pattern and logs any fields that don't match. This allows for flexible
75
+ validation of dynamically named fields.
76
+
77
+ Args:
78
+ __context: Pydantic context object passed during initialization.
79
+ """
56
80
  if not self.model_extra:
57
81
  return
58
82
 
@@ -85,12 +109,14 @@ class GenericObject(BaseModel):
85
109
  )
86
110
 
87
111
  def get_field_aliases(self) -> list[str]:
88
- """
89
- Gets a list of aliases for confirming whether extra
90
- fields are allowed.
112
+ """Get all field aliases defined for the model.
113
+
114
+ Collects aliases from all model fields to help validate whether provided
115
+ field names are valid, even if they use alias names instead of field names.
91
116
 
92
117
  Returns:
93
- A list of field aliases for the class.
118
+ A list of all field aliases defined in the model. Empty list if no
119
+ aliases are defined.
94
120
  """
95
121
 
96
122
  aliases: list[str] = []
@@ -94,7 +94,9 @@ class ReferenceObject(GenericObject):
94
94
  as per RFC6901.
95
95
  """
96
96
 
97
- model_config = ConfigDict(extra="forbid", populate_by_name=True)
97
+ model_config: ClassVar[ConfigDict] = ConfigDict(
98
+ extra="forbid", populate_by_name=True
99
+ )
98
100
 
99
101
  ref: URI = Field(alias="$ref")
100
102
  _reference_uri: ClassVar[str] = (
@@ -225,7 +227,7 @@ class ExternalDocumentationObject(GenericObject):
225
227
  class PathsObject(GenericObject):
226
228
  """Validates the OpenAPI Specification paths object - §4.8.8"""
227
229
 
228
- model_config = ConfigDict(extra="allow")
230
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="allow")
229
231
 
230
232
  @model_validator(mode="before")
231
233
  @classmethod
@@ -403,7 +405,7 @@ class ResponsesObject(GenericObject):
403
405
  Validates the OpenAPI Specification responses object - §4.8.16
404
406
  """
405
407
 
406
- model_config = ConfigDict(
408
+ model_config: ClassVar[ConfigDict] = ConfigDict(
407
409
  extra="allow",
408
410
  )
409
411
 
@@ -498,7 +500,7 @@ class CallbackObject(GenericObject):
498
500
  Validates the OpenAPI Specification callback object - §4.8.18
499
501
  """
500
502
 
501
- model_config = ConfigDict(extra="allow")
503
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="allow")
502
504
 
503
505
  # The keys are runtime expressions that resolve to a URL
504
506
  # The values are Response Objects or Reference Objects
@@ -664,7 +666,7 @@ class SchemaObject(GenericObject):
664
666
  and validated through jsonschema.
665
667
  """
666
668
 
667
- model_config = ConfigDict(
669
+ model_config: ClassVar[ConfigDict] = ConfigDict(
668
670
  populate_by_name=True,
669
671
  extra="allow", # Allow all standard JSON Schema fields
670
672
  )
@@ -142,7 +142,9 @@ class ReferenceObject(GenericObject):
142
142
  as per RFC6901.
143
143
  """
144
144
 
145
- model_config = ConfigDict(extra="forbid", populate_by_name=True)
145
+ model_config: ClassVar[ConfigDict] = ConfigDict(
146
+ extra="forbid", populate_by_name=True
147
+ )
146
148
 
147
149
  ref: URI = Field(alias="$ref")
148
150
  summary: str | None
@@ -336,7 +338,7 @@ class SchemaObject(GenericObject):
336
338
  and validated through jsonschema.
337
339
  """
338
340
 
339
- model_config = ConfigDict(
341
+ model_config: ClassVar[ConfigDict] = ConfigDict(
340
342
  populate_by_name=True,
341
343
  extra="allow", # Allow all standard JSON Schema fields
342
344
  )
@@ -1,9 +1,11 @@
1
+ #!/bin/sh
1
2
  ruff check --fix
2
3
  ruff format
3
4
  python scripts/setup_test_specs.py
4
5
  pytest --cov-report term-missing --cov=amati tests
5
6
  pytest --doctest-modules amati/
7
+ pyright --verifytypes amati --ignoreexternal
6
8
  docker build -t amati -f Dockerfile .
7
- cd tests/
9
+ cd tests/ || exit
8
10
  docker run -v "$(pwd):/data" amati -d /data --consistency-check
9
- cd ../
11
+ cd ..
@@ -0,0 +1,5 @@
1
+ #!/bin/sh
2
+ uv python install
3
+ . .venv/bin/activate
4
+ uv sync
5
+ pre-commit install
@@ -1,11 +1,12 @@
1
+ #!/bin/sh
1
2
  current_branch=$(git rev-parse --abbrev-ref HEAD)
2
3
 
3
4
  git checkout main
4
5
  git pull origin main
5
- git checkout $current_branch
6
+ git checkout "$current_branch"
6
7
  git merge main
7
8
  git checkout --theirs uv.lock
8
9
  uv lock
9
10
  git add uv.lock
10
11
  git commit uv.lock -m "chore: upgrade dependencies"
11
- git push origin $current_branch
12
+ git push origin "$current_branch"
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "amati"
3
- version = "0.2.20"
3
+ version = "0.2.22"
4
4
  description = "Validates that a .yaml or .json file conforms to the OpenAPI Specifications 3.x."
5
5
  readme = "README.md"
6
6
  authors = [
@@ -45,6 +45,7 @@ build-backend = "hatchling.build"
45
45
  dev = [
46
46
  "hypothesis>=6.131.28",
47
47
  "pre-commit>=4.2.0",
48
+ "pyright>=1.1.406",
48
49
  "pytest>=8.3.5",
49
50
  "pytest-cov>=6.1.1",
50
51
  "ruff>=0.12.1",
@@ -10,6 +10,6 @@
10
10
  "input": {
11
11
  "$ref": "description.yml#/introduction"
12
12
  },
13
- "url": "https://errors.pydantic.dev/2.11/v/string_type"
13
+ "url": "https://errors.pydantic.dev/2.12/v/string_type"
14
14
  }
15
15
  ]
@@ -159,7 +159,7 @@ def test_uri_non_relative(value: str):
159
159
 
160
160
  result = URI(candidate)
161
161
  assert result == candidate
162
- assert result.type == URIType.NON_RELATIVE
162
+ assert result.type == URIType.NETWORK_PATH
163
163
  assert result.is_iri == ("xn--" in candidate.lower())
164
164
 
165
165
 
@@ -170,7 +170,7 @@ def test_iri_non_relative(value: str):
170
170
 
171
171
  result = URI(candidate)
172
172
  assert result == candidate
173
- assert result.type == URIType.NON_RELATIVE
173
+ assert result.type == URIType.NETWORK_PATH
174
174
  assert result.is_iri is True
175
175
 
176
176