amati 0.3.13.tar.gz → 0.3.15.tar.gz

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in their public registries.
Files changed (107)
  1. {amati-0.3.13 → amati-0.3.15}/.github/workflows/checks.yaml +1 -1
  2. {amati-0.3.13 → amati-0.3.15}/.github/workflows/codeql.yml +2 -2
  3. {amati-0.3.13 → amati-0.3.15}/.github/workflows/coverage.yaml +1 -1
  4. {amati-0.3.13 → amati-0.3.15}/.github/workflows/data-refresh.yaml +1 -1
  5. {amati-0.3.13 → amati-0.3.15}/.github/workflows/publish.yaml +1 -1
  6. {amati-0.3.13 → amati-0.3.15}/.github/workflows/scorecards.yml +1 -1
  7. {amati-0.3.13 → amati-0.3.15}/.github/workflows/tag-and-create-release.yaml +1 -1
  8. {amati-0.3.13 → amati-0.3.15}/.pre-commit-config.yaml +2 -2
  9. {amati-0.3.13 → amati-0.3.15}/PKG-INFO +1 -1
  10. {amati-0.3.13 → amati-0.3.15}/amati/_data/files/spdx-licences.json +21 -0
  11. {amati-0.3.13 → amati-0.3.15}/amati/_data/files/tlds.json +1 -0
  12. amati-0.3.15/amati/_error_handler.py +48 -0
  13. amati-0.3.15/amati/_references.py +226 -0
  14. {amati-0.3.13 → amati-0.3.15}/amati/_resolve_forward_references.py +36 -60
  15. {amati-0.3.13 → amati-0.3.15}/amati/amati.py +109 -16
  16. {amati-0.3.13 → amati-0.3.15}/amati/exceptions.py +1 -1
  17. {amati-0.3.13 → amati-0.3.15}/amati/fields/email.py +1 -1
  18. {amati-0.3.13 → amati-0.3.15}/amati/fields/http_status_codes.py +2 -1
  19. {amati-0.3.13 → amati-0.3.15}/amati/fields/iso9110.py +2 -1
  20. {amati-0.3.13 → amati-0.3.15}/amati/fields/media.py +2 -1
  21. {amati-0.3.13 → amati-0.3.15}/amati/fields/oas.py +1 -1
  22. {amati-0.3.13 → amati-0.3.15}/amati/fields/spdx_licences.py +2 -1
  23. {amati-0.3.13 → amati-0.3.15}/amati/fields/uri.py +2 -1
  24. amati-0.3.15/amati/validators/_discriminators.py +8 -0
  25. {amati-0.3.13 → amati-0.3.15}/amati/validators/generic.py +24 -16
  26. {amati-0.3.13 → amati-0.3.15}/amati/validators/oas304.py +122 -50
  27. {amati-0.3.13 → amati-0.3.15}/amati/validators/oas311.py +92 -37
  28. {amati-0.3.13 → amati-0.3.15}/pyproject.toml +1 -1
  29. {amati-0.3.13 → amati-0.3.15}/scripts/setup_test_specs.py +5 -5
  30. {amati-0.3.13 → amati-0.3.15}/tests/data/.amati.tests.yaml +27 -13
  31. amati-0.3.13/tests/data/redocly.openapi.yaml.errors.json → amati-0.3.15/tests/data/api.github.com.json.errors.json +0 -9
  32. {amati-0.3.13 → amati-0.3.15}/tests/data/api.github.com.yaml.errors.json +0 -9
  33. amati-0.3.15/tests/data/discourse.yml.errors.json +11 -0
  34. amati-0.3.15/tests/data/next-api.github.com.json.errors.json +1439 -0
  35. {amati-0.3.13 → amati-0.3.15}/tests/fields/test_http_status_codes.py +2 -2
  36. {amati-0.3.13 → amati-0.3.15}/tests/fields/test_uri.py +1 -35
  37. {amati-0.3.13 → amati-0.3.15}/tests/model_validators/test_all_of.py +1 -1
  38. {amati-0.3.13 → amati-0.3.15}/tests/model_validators/test_at_least_one.py +1 -1
  39. {amati-0.3.13 → amati-0.3.15}/tests/model_validators/test_only_one.py +1 -1
  40. amati-0.3.15/tests/references/test_uri_collector_mixin.py +306 -0
  41. amati-0.3.15/tests/references/test_uri_reference.py +323 -0
  42. amati-0.3.15/tests/references/test_uri_registry.py +317 -0
  43. amati-0.3.15/tests/strategies.py +141 -0
  44. {amati-0.3.13 → amati-0.3.15}/tests/test_external_specs.py +9 -7
  45. {amati-0.3.13 → amati-0.3.15}/tests/validators/test_generic.py +9 -7
  46. {amati-0.3.13 → amati-0.3.15}/tests/validators/test_licence_object.py +2 -2
  47. {amati-0.3.13 → amati-0.3.15}/tests/validators/test_security_scheme_object.py +1 -1
  48. {amati-0.3.13 → amati-0.3.15}/tests/validators/test_server_variable_object.py +7 -3
  49. {amati-0.3.13 → amati-0.3.15}/uv.lock +35 -35
  50. amati-0.3.13/amati/_error_handler.py +0 -48
  51. amati-0.3.13/tests/data/discourse.yml.errors.json +0 -1
  52. amati-0.3.13/tests/data/next-api.github.com.yaml.errors.json +0 -32
  53. amati-0.3.13/tests/helpers.py +0 -51
  54. {amati-0.3.13 → amati-0.3.15}/.dockerignore +0 -0
  55. {amati-0.3.13 → amati-0.3.15}/.github/actions/setup/action.yaml +0 -0
  56. {amati-0.3.13 → amati-0.3.15}/.github/dependabot.yml +0 -0
  57. {amati-0.3.13 → amati-0.3.15}/.github/workflows/dependency-review.yml +0 -0
  58. {amati-0.3.13 → amati-0.3.15}/.gitignore +0 -0
  59. {amati-0.3.13 → amati-0.3.15}/.python-version +0 -0
  60. {amati-0.3.13 → amati-0.3.15}/Dockerfile +0 -0
  61. {amati-0.3.13 → amati-0.3.15}/LICENSE +0 -0
  62. {amati-0.3.13 → amati-0.3.15}/README.md +0 -0
  63. {amati-0.3.13 → amati-0.3.15}/SECURITY.md +0 -0
  64. {amati-0.3.13 → amati-0.3.15}/TEMPLATE.html +0 -0
  65. {amati-0.3.13 → amati-0.3.15}/amati/__init__.py +0 -0
  66. {amati-0.3.13 → amati-0.3.15}/amati/_data/files/http-status-codes.json +0 -0
  67. {amati-0.3.13 → amati-0.3.15}/amati/_data/files/iso9110.json +0 -0
  68. {amati-0.3.13 → amati-0.3.15}/amati/_data/files/media-types.json +0 -0
  69. {amati-0.3.13 → amati-0.3.15}/amati/_data/files/schemes.json +0 -0
  70. {amati-0.3.13 → amati-0.3.15}/amati/_data/http_status_code.py +0 -0
  71. {amati-0.3.13 → amati-0.3.15}/amati/_data/iso9110.py +0 -0
  72. {amati-0.3.13 → amati-0.3.15}/amati/_data/media_types.py +0 -0
  73. {amati-0.3.13 → amati-0.3.15}/amati/_data/refresh.py +0 -0
  74. {amati-0.3.13 → amati-0.3.15}/amati/_data/schemes.py +0 -0
  75. {amati-0.3.13 → amati-0.3.15}/amati/_data/spdx_licences.py +0 -0
  76. {amati-0.3.13 → amati-0.3.15}/amati/_data/tlds.py +0 -0
  77. {amati-0.3.13 → amati-0.3.15}/amati/_logging.py +0 -0
  78. {amati-0.3.13 → amati-0.3.15}/amati/fields/__init__.py +0 -0
  79. {amati-0.3.13 → amati-0.3.15}/amati/fields/_custom_types.py +0 -0
  80. {amati-0.3.13 → amati-0.3.15}/amati/fields/commonmark.py +0 -0
  81. {amati-0.3.13 → amati-0.3.15}/amati/fields/json.py +0 -0
  82. {amati-0.3.13 → amati-0.3.15}/amati/file_handler.py +0 -0
  83. {amati-0.3.13 → amati-0.3.15}/amati/grammars/oas.py +0 -0
  84. {amati-0.3.13 → amati-0.3.15}/amati/grammars/rfc6901.py +0 -0
  85. {amati-0.3.13 → amati-0.3.15}/amati/grammars/rfc7159.py +0 -0
  86. {amati-0.3.13 → amati-0.3.15}/amati/model_validators.py +0 -0
  87. {amati-0.3.13 → amati-0.3.15}/amati/py.typed +0 -0
  88. {amati-0.3.13 → amati-0.3.15}/amati/validators/__init__.py +0 -0
  89. {amati-0.3.13 → amati-0.3.15}/bin/checks.sh +0 -0
  90. {amati-0.3.13 → amati-0.3.15}/bin/startup.sh +0 -0
  91. {amati-0.3.13 → amati-0.3.15}/bin/upgrade-python.sh +0 -0
  92. {amati-0.3.13 → amati-0.3.15}/bin/uv-upgrade-from-main.sh +0 -0
  93. {amati-0.3.13 → amati-0.3.15}/tests/__init__.py +0 -0
  94. {amati-0.3.13 → amati-0.3.15}/tests/data/DigitalOcean-public.v2.errors.json +0 -0
  95. {amati-0.3.13 → amati-0.3.15}/tests/data/invalid-openapi.yaml +0 -0
  96. {amati-0.3.13 → amati-0.3.15}/tests/data/openapi.yaml +0 -0
  97. {amati-0.3.13 → amati-0.3.15}/tests/data/openapi.yaml.gz +0 -0
  98. {amati-0.3.13 → amati-0.3.15}/tests/fields/__init__.py +0 -0
  99. {amati-0.3.13 → amati-0.3.15}/tests/fields/test_email.py +0 -0
  100. {amati-0.3.13 → amati-0.3.15}/tests/fields/test_iso9110.py +0 -0
  101. {amati-0.3.13 → amati-0.3.15}/tests/fields/test_media.py +0 -0
  102. {amati-0.3.13 → amati-0.3.15}/tests/fields/test_oas.py +0 -0
  103. {amati-0.3.13 → amati-0.3.15}/tests/fields/test_spdx_licences.py +0 -0
  104. {amati-0.3.13 → amati-0.3.15}/tests/model_validators/test_if_then.py +0 -0
  105. {amati-0.3.13 → amati-0.3.15}/tests/test_amati.py +0 -0
  106. {amati-0.3.13 → amati-0.3.15}/tests/test_logging.py +0 -0
  107. {amati-0.3.13 → amati-0.3.15}/tests/validators/__init__.py +0 -0

{amati-0.3.13 → amati-0.3.15}/.github/workflows/checks.yaml
@@ -77,7 +77,7 @@ jobs:
       - name: Coverage comment
         if: steps.check_changes.outputs.relevant == 'true'
         id: coverage_comment
-        uses: py-cov-action/python-coverage-comment-action@39ffc771120970de615612f01a030260bcb45443 # v3
+        uses: py-cov-action/python-coverage-comment-action@14efb884fd6f322dca843a946ce2125a55c12e1d # v3
         with:
           GITHUB_TOKEN: ${{ secrets.BOT_COMMENT_TOKEN }}
         continue-on-error: true

{amati-0.3.13 → amati-0.3.15}/.github/workflows/codeql.yml
@@ -73,7 +73,7 @@ jobs:
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
         with:
           languages: ${{ matrix.language }}
           build-mode: ${{ matrix.build-mode }}
@@ -101,6 +101,6 @@ jobs:
           exit 1
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
         with:
           category: "/language:${{matrix.language}}"

{amati-0.3.13 → amati-0.3.15}/.github/workflows/coverage.yaml
@@ -24,7 +24,7 @@ jobs:
           egress-policy: audit
 
       - name: Post comment
-        uses: py-cov-action/python-coverage-comment-action@39ffc771120970de615612f01a030260bcb45443 # v3
+        uses: py-cov-action/python-coverage-comment-action@14efb884fd6f322dca843a946ce2125a55c12e1d # v3
         with:
           GITHUB_TOKEN: ${{ secrets.BOT_COMMENT_TOKEN }}
           GITHUB_PR_RUN_ID: ${{ github.event.workflow_run.id }}

{amati-0.3.13 → amati-0.3.15}/.github/workflows/data-refresh.yaml
@@ -25,7 +25,7 @@ jobs:
           token: ${{ secrets.BOT_TOKEN }}
 
       - name: Set up uv
-        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
+        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
         with:
           version: "latest"
 

{amati-0.3.13 → amati-0.3.15}/.github/workflows/publish.yaml
@@ -24,7 +24,7 @@ jobs:
 
       - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
       - name: Install uv
-        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
+        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
       - name: Set up Python
         uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:

{amati-0.3.13 → amati-0.3.15}/.github/workflows/scorecards.yml
@@ -80,6 +80,6 @@ jobs:
 
       # Upload the results to GitHub's code scanning dashboard.
       - name: "Upload to code-scanning"
-        uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
         with:
           sarif_file: results.sarif

{amati-0.3.13 → amati-0.3.15}/.github/workflows/tag-and-create-release.yaml
@@ -34,7 +34,7 @@ jobs:
           fetch-depth: 0 # Fetch all history for proper tag creation
           token: ${{ secrets.BOT_TOKEN }}
       - name: Install uv
-        uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
+        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
 
       - name: Current version
         id: current_version

{amati-0.3.13 → amati-0.3.15}/.pre-commit-config.yaml
@@ -1,7 +1,7 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.14.6
+    rev: v0.14.8
     hooks:
       # Run the linter.
       - id: ruff-check
@@ -16,4 +16,4 @@ repos:
     rev: v0.7.2
     hooks:
       - id: shellcheck
-        args: [-x]
+        args: [-x]

{amati-0.3.13 → amati-0.3.15}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: amati
-Version: 0.3.13
+Version: 0.3.15
 Summary: Validates that a .yaml or .json file conforms to the OpenAPI Specifications 3.x.
 Project-URL: Homepage, https://github.com/gwyli/amati
 Project-URL: Issues, https://github.com/gwyli/amati/issues

{amati-0.3.13 → amati-0.3.15}/amati/_data/files/spdx-licences.json
@@ -283,6 +283,15 @@
       "isOsiApproved": false,
       "isFsfLibre": false
     },
+    {
+      "reference": "https://spdx.org/licenses/ALGLIB-Documentation.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/ALGLIB-Documentation.json",
+      "name": "ALGLIB Documentation License",
+      "licenseId": "ALGLIB-Documentation",
+      "seeAlso": [],
+      "isOsiApproved": true
+    },
     {
       "reference": "https://spdx.org/licenses/AMD-newlib.html",
       "isDeprecatedLicenseId": false,
@@ -4357,6 +4366,18 @@
       ],
       "isOsiApproved": false
     },
+    {
+      "reference": "https://spdx.org/licenses/ISO-permission.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/ISO-permission.json",
+      "name": "ISO permission notice",
+      "licenseId": "ISO-permission",
+      "seeAlso": [
+        "https://gitlab.com/agmartin/linuxdoc-tools/-/blob/master/iso-entities/COPYING?ref_type=heads",
+        "https://www.itu.int/ITU-T/formal-language/itu-t/t/t173/1997/ISOMHEG-sir.html"
+      ],
+      "isOsiApproved": false
+    },
     {
       "reference": "https://spdx.org/licenses/Jam.html",
       "isDeprecatedLicenseId": false,

{amati-0.3.13 → amati-0.3.15}/amati/_data/files/tlds.json
@@ -1416,6 +1416,7 @@
   ".\u516b\u5366",
   "\u200f.\u05d9\u05e9\u05e8\u05d0\u05dc\u200e",
   "\u200f.\u0645\u0648\u0642\u0639\u200e",
+  ".\u4e00\u53f7\u5e97",
   ".\u09ac\u09be\u0982\u09b2\u09be",
   ".\u516c\u76ca",
   ".\u516c\u53f8",

amati-0.3.15/amati/_error_handler.py
@@ -0,0 +1,48 @@
+"""
+Handles Pydantic errors and amati logs to provide a consistent view to the user.
+"""
+
+import json
+from typing import cast
+
+from amati._logging import Log
+
+type JSONPrimitive = str | int | float | bool | None
+type JSONArray = list["JSONValue"]
+type JSONObject = dict[str, "JSONValue"]
+type JSONValue = JSONPrimitive | JSONArray | JSONObject
+
+
+class ErrorHandler:
+    def __init__(self) -> None:
+        self._errors: list[JSONObject] = []
+
+    def register_logs(self, logs: list[Log]):
+        self._errors.extend(cast(list[JSONObject], logs))
+
+    def register_log(self, log: Log):
+        self._errors.append(cast(JSONObject, log))
+
+    def register_errors(self, errors: list[JSONObject]):
+        self._errors.extend(errors)
+
+    def deduplicate(self):
+        """
+        Remove duplicates by converting each dict to a JSON string for comparison.
+        """
+        seen: set[str] = set()
+        unique_data: list[JSONObject] = []
+
+        item: JSONObject
+        for item in self._errors:
+            # Convert to JSON string with sorted keys for consistent hashing
+            item_json = json.dumps(item, sort_keys=True, separators=(",", ":"))
+            if item_json not in seen:
+                seen.add(item_json)
+                unique_data.append(item)
+
+        self._errors = unique_data
+
+    @property
+    def errors(self) -> list[JSONObject]:
+        return self._errors
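
A minimal usage sketch of the new ErrorHandler (not part of the package diff; the import path amati._error_handler and the error values are assumptions based on the file added above). JSON-identical error dictionaries collapse to a single entry after deduplicate():

    from amati._error_handler import ErrorHandler

    handler = ErrorHandler()
    # Two structurally identical error dicts (hypothetical values)
    handler.register_errors(
        [
            {"type": "missing", "loc": ["info", "title"], "msg": "Field required"},
            {"type": "missing", "loc": ["info", "title"], "msg": "Field required"},
        ]
    )
    handler.deduplicate()  # compares canonical JSON encodings with sorted keys
    assert len(handler.errors) == 1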

amati-0.3.15/amati/_references.py
@@ -0,0 +1,226 @@
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any
+
+from pydantic import BaseModel
+
+from amati.fields import URI, URIType
+
+
+@dataclass(frozen=True)
+class URIReference:
+    """Immutable record of a URI found during validation"""
+
+    uri: URI
+    source_document: Path
+    source_model_name: str  # Just the string name for error reporting
+    source_field: str
+    target_model: type[BaseModel]  # The model type to validate with
+
+    def resolve(self) -> Path:
+        """Resolve URI relative to source document, see
+        https://spec.openapis.org/oas/v3.1.1.html#relative-references-in-api-description-uris
+        """
+
+        if self.uri.scheme == "file":
+            if not self.uri.path:
+                raise ValueError("File URI must have a path component")
+
+            netloc: Path | None = (
+                Path(self.uri.authority)
+                if self.uri.authority
+                else Path(self.uri.host)
+                if self.uri.host
+                else None
+            )
+
+            return (
+                (netloc / self.uri.path).resolve()
+                if netloc
+                else Path(self.uri.path).resolve()
+            )
+
+        if self.uri.type == URIType.ABSOLUTE:
+            raise NotImplementedError("Absolute URI resolution not implemented")
+
+        if self.uri.type == URIType.NETWORK_PATH:
+            return Path(self.uri).resolve()
+
+        if self.uri.type == URIType.RELATIVE:
+            path: Path = self.source_document.parent / self.uri.lstrip("/")
+            return path.resolve()
+
+        if self.uri.type == URIType.JSON_POINTER:
+            path: Path = self.source_document.parent / self.uri.lstrip("#/")
+            return path.resolve()
+
+        # Guard against future changes
+        raise ValueError(f"Unknown URI type: {self.uri.type}")  # pragma: no cover
+
+
+class URIRegistry:
+    """Registry for discovered URIs using the Singleton pattern.
+
+    This class maintains a central registry of all URI references discovered
+    during document validation. It tracks both the URIs themselves and which
+    documents have already been processed to avoid duplicate validation.
+
+    Attributes:
+        _instance: Class-level singleton instance.
+        _uris: List of all registered URI references.
+        _processed: Set of file paths that have been validated.
+    """
+
+    _instance = None
+
+    def __init__(self):
+        """Initialize a new URIRegistry instance.
+
+        Note:
+            This should not be called directly. Use get_instance() instead
+            to obtain the singleton instance.
+        """
+        self._uris: list[URIReference] = []
+        self._processed: set[Path] = set()
+
+    @classmethod
+    def get_instance(cls) -> URIRegistry:
+        """Get or create the singleton instance of URIRegistry.
+
+        Returns:
+            URIRegistry: The singleton instance of the registry.
+        """
+        if cls._instance is None:
+            cls._instance = cls()
+
+        return cls._instance
+
+    def register(self, ref: URIReference):
+        """Register a discovered URI reference.
+
+        Args:
+            ref (URIReference): The URI reference to register, including
+                source document, and target model.
+        """
+
+        if not isinstance(ref, URIReference):  # pyright: ignore[reportUnnecessaryIsInstance]
+            raise TypeError("ref must be an instance of URIReference")
+
+        self._uris.append(ref)
+
+    def mark_processed(self, path: Path):
+        """Mark a document as having been validated.
+
+        The path is resolved to an absolute path before storage to ensure
+        consistent tracking regardless of how the path was specified.
+
+        Args:
+            path (Path): The file path of the document that has been processed.
+        """
+        self._processed.add(path.resolve())
+
+    def is_processed(self, path: Path) -> bool:
+        """Check if a document has already been validated.
+
+        Args:
+            path (Path): The file path to check.
+
+        Returns:
+            bool: True if the document has been processed, False otherwise.
+        """
+        return path.resolve() in self._processed
+
+    def get_all_references(self) -> list[URIReference]:
+        """Get all discovered URI references.
+
+        Returns:
+            list[URIReference]: A copy of the list of all registered URI
+                references. Returns a copy to prevent external modification
+                of the internal registry.
+        """
+        return self._uris.copy()
+
+    def resolvable(self, path: Path) -> bool:
+        """Check if the file referenced by a URI exists.
+
+        Args:
+            path (Path): The file path to verify.
+
+        Returns:
+            bool: True if the path points to an existing file, False otherwise.
+        """
+        return path.is_file()
+
+    def reset(self):
+        """Reset the registry for a new validation run.
+
+        Clears all registered URIs and processed document records. This is
+        typically called at the beginning of a new validation session.
+        """
+        self._uris.clear()
+        self._processed.clear()
+
+
+class URICollectorMixin(BaseModel):
+    """Mixin for Pydantic models to automatically collect URIs during validation.
+
+    This mixin hooks into the Pydantic model lifecycle to automatically
+    discover and register URI fields during model instantiation. It inspects
+    all fields after validation and registers any URI-type fields with the
+    URIRegistry for subsequent processing.
+
+    The mixin expects a 'current_document' key in the validation context
+    to track the source document for each URI reference.
+    """
+
+    def model_post_init(self, __context: dict[str, Any]) -> None:
+        """Post-initialization hook to collect URI references from model fields.
+
+        This method is automatically called by Pydantic after model validation
+        is complete. It inspects all fields for URI types and registers them
+        with the singleton URIRegistry.
+
+        Args:
+            __context (dict[str, Any]): Validation context dictionary. Expected
+                to contain a 'current_document' key with the path to the source
+                document being validated.
+
+        Note:
+            This method calls super().model_post_init() to ensure compatibility
+            with other mixins and the base model's initialization process.
+
+        Example:
+            Context should be passed during model instantiation:
+            >>> class MyModel(URICollectorMixin, BaseModel):
+            ...     ref: URI
+            >>> model = MyModel.model_validate(
+            ...     {"ref": "http://example.com/resource"},
+            ...     context={"current_document": "/path/to/doc.json"}
+            ... )
+        """
+        super().model_post_init(__context)
+
+        if not __context:
+            return
+
+        current_doc = __context.get("current_document")
+        if not current_doc:
+            return
+
+        # Inspect all fields for URI types
+        for field_name, field_value in self.model_dump().items():
+            if field_value is None:
+                continue
+
+            # Check if this field contains a URI
+            # Adjust this check based on your URI type implementation
+            if isinstance(field_value, URI):
+                ref = URIReference(
+                    uri=field_value,
+                    source_document=Path(current_doc),
+                    source_model_name=self.__class__.__name__,
+                    source_field=field_name,
+                    # The linked document should be validated with the same model type
+                    target_model=self.__class__,
+                )
+                URIRegistry.get_instance().register(ref)
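
A sketch of how the new reference-collection pieces fit together, adapted from the URICollectorMixin docstring above (not part of the package diff; the model name and paths are illustrative, and only names visible in this diff are used):

    from pydantic import BaseModel

    from amati._references import URICollectorMixin, URIRegistry
    from amati.fields import URI


    class LinkedDocument(URICollectorMixin, BaseModel):  # hypothetical model
        ref: URI


    registry = URIRegistry.get_instance()
    registry.reset()  # start a fresh validation run

    LinkedDocument.model_validate(
        {"ref": "http://example.com/resource"},
        context={"current_document": "/path/to/doc.json"},
    )

    # Any URI fields collected during validation are now available centrally
    for reference in registry.get_all_references():
        print(reference.source_model_name, reference.source_field, reference.uri)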

{amati-0.3.13 → amati-0.3.15}/amati/_resolve_forward_references.py
@@ -5,6 +5,7 @@ without all its dependencies. This module rebuilds all models in a module.
 
 import inspect
 import sys
+import typing
 from collections import defaultdict
 from types import ModuleType
 
@@ -26,20 +27,46 @@ class ModelDependencyResolver:
         """Register a Pydantic model for dependency analysis."""
         self.models[model.__name__] = model
 
-    def register_models(self, models: list[type[BaseModel]]) -> None:
-        """Register multiple Pydantic models."""
-        for model in models:
-            self.register_model(model)
+    @staticmethod
+    def extract_all_references(annotation: typing.Any, refs: set[str] | None = None):
+        """
+        Recursively extract all ForwardRef and type references from an annotation.
+
+        Args:
+            annotation: A type annotation (potentially deeply nested)
+            refs: Set to accumulate references (used internally for recursion)
+
+        Returns:
+            Set of either ForwardRef objects or actual type/class objects
+        """
+        if refs is None:
+            refs = set()
+
+        # Direct ForwardRef
+        if isinstance(annotation, typing.ForwardRef):
+            refs.add(annotation.__forward_arg__)
+            return refs
+
+        # Direct class reference
+        if isinstance(annotation, type):
+            refs.add(annotation.__name__)
+            return refs
+
+        for origin in typing.get_args(annotation):
+            ModelDependencyResolver.extract_all_references(origin, refs)
+
+        return refs
 
     def _analyze_model_dependencies(self, model: type[BaseModel]) -> set[str]:
         """Analyze a single model's dependencies from its annotations."""
        dependencies: set[str] = set()
 
         for field_info in model.model_fields.values():
-            # Use a magic value that's an invalid class name for getattr so if
-            # there is no __name__ attribute it won't appear in self.models
-            if (name := getattr(field_info.annotation, "__name__", "!")) in self.models:
-                dependencies.update(name)
+            references = ModelDependencyResolver.extract_all_references(
+                field_info.annotation
+            )
+
+            dependencies.update(ref for ref in references if ref in self.models)
 
         return dependencies
 
@@ -48,18 +75,10 @@
         self.dependencies.clear()
         self.graph.clear()
 
-        # First pass: collect all dependencies
         for model_name, model in self.models.items():
             deps = self._analyze_model_dependencies(model)
             self.dependencies[model_name] = deps
 
-        # Second pass: build directed graph
-        for model_name, deps in self.dependencies.items():
-            for dep in deps:
-                if dep in self.models:
-                    # Build forward graph (dependency -> dependent)
-                    self.graph[dep].append(model_name)
-
     def _tarjan_scc(self) -> list[list[str]]:
         """Find strongly connected components using Tarjan's algorithm."""
         index_counter = [0]
@@ -76,13 +95,6 @@
             stack.append(node)
             on_stack[node] = True
 
-            for successor in self.graph[node]:
-                if successor not in index:
-                    strongconnect(successor)
-                    lowlinks[node] = min(lowlinks[node], lowlinks[successor])
-                elif on_stack[successor]:
-                    lowlinks[node] = min(lowlinks[node], index[successor])
-
             if lowlinks[node] == index[node]:
                 component: list[str] = []
                 while True:
@@ -99,41 +111,6 @@
 
         return sccs
 
-    def _topological_sort_sccs(self, sccs: list[list[str]]) -> list[list[str]]:
-        """Topologically sort the strongly connected components."""
-        # Map each node to its SCC index
-        node_to_scc = {node: i for i, scc in enumerate(sccs) for node in scc}
-
-        # Find dependencies between SCCs
-        dependencies: set[tuple[int, ...]] = set()
-        for node in self.models:
-            for neighbor in self.graph[node]:
-                src_scc, dst_scc = node_to_scc[node], node_to_scc[neighbor]
-                if src_scc != dst_scc:
-                    dependencies.add((src_scc, dst_scc))
-
-        # Count incoming edges for each SCC
-        in_degree = [0] * len(sccs)
-        for _, dst in dependencies:
-            in_degree[dst] += 1
-
-        # Process SCCs with no dependencies first
-        ready = [i for i, deg in enumerate(in_degree) if deg == 0]
-        result: list[list[str]] = []
-
-        while ready:
-            current = ready.pop()
-            result.append(sccs[current])
-
-            # Remove this SCC and update in-degrees
-            for src, dst in dependencies:
-                if src == current:
-                    in_degree[dst] -= 1
-                    if in_degree[dst] == 0:
-                        ready.append(dst)
-
-        return result
-
     def get_rebuild_order(self) -> list[list[str]]:
         """
@@ -141,8 +118,7 @@
         rebuilt together.
         """
         self.build_dependency_graph()
-        sccs = self._tarjan_scc()
-        return self._topological_sort_sccs(sccs)
+        return self._tarjan_scc()
 
     def rebuild_models(self) -> None:
         """Rebuild all registered models in the correct dependency order."""