amati 0.2.tar.gz → 0.2.2.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. {amati-0.2 → amati-0.2.2}/.github/dependabot.yml +6 -2
  2. amati-0.2.2/.github/workflows/checks.yaml +90 -0
  3. amati-0.2.2/.github/workflows/publish.yaml +47 -0
  4. amati-0.2.2/.python-version +1 -0
  5. {amati-0.2 → amati-0.2.2}/PKG-INFO +26 -26
  6. {amati-0.2 → amati-0.2.2}/README.md +25 -25
  7. {amati-0.2 → amati-0.2.2}/amati/amati.py +37 -14
  8. {amati-0.2 → amati-0.2.2}/amati/logging.py +1 -1
  9. {amati-0.2 → amati-0.2.2}/amati/model_validators.py +32 -32
  10. {amati-0.2 → amati-0.2.2}/amati/validators/generic.py +3 -3
  11. {amati-0.2 → amati-0.2.2}/amati/validators/oas304.py +4 -4
  12. {amati-0.2 → amati-0.2.2}/amati/validators/oas311.py +5 -5
  13. {amati-0.2 → amati-0.2.2}/bin/checks.sh +2 -2
  14. {amati-0.2 → amati-0.2.2}/pyproject.toml +1 -1
  15. {amati-0.2 → amati-0.2.2}/tests/model_validators/test_all_of.py +63 -63
  16. {amati-0.2 → amati-0.2.2}/tests/model_validators/test_at_least_one.py +51 -51
  17. {amati-0.2 → amati-0.2.2}/tests/model_validators/test_if_then.py +57 -57
  18. {amati-0.2 → amati-0.2.2}/tests/model_validators/test_only_one.py +43 -43
  19. {amati-0.2 → amati-0.2.2}/tests/test_logging.py +5 -5
  20. {amati-0.2 → amati-0.2.2}/tests/validators/test_generic.py +11 -11
  21. {amati-0.2 → amati-0.2.2}/tests/validators/test_licence_object.py +24 -24
  22. {amati-0.2 → amati-0.2.2}/tests/validators/test_security_scheme_object.py +33 -33
  23. {amati-0.2 → amati-0.2.2}/tests/validators/test_server_variable_object.py +7 -7
  24. {amati-0.2 → amati-0.2.2}/uv.lock +44 -44
  25. amati-0.2/.github/workflows/checks.yaml +0 -64
  26. amati-0.2/.github/workflows/publish.yaml +0 -29
  27. amati-0.2/.python-version +0 -1
  28. {amati-0.2 → amati-0.2.2}/.dockerignore +0 -0
  29. {amati-0.2 → amati-0.2.2}/.github/workflows/codeql.yml +0 -0
  30. {amati-0.2 → amati-0.2.2}/.github/workflows/coverage.yaml +0 -0
  31. {amati-0.2 → amati-0.2.2}/.gitignore +0 -0
  32. {amati-0.2 → amati-0.2.2}/.pre-commit-config.yaml +0 -0
  33. {amati-0.2 → amati-0.2.2}/.pylintrc +0 -0
  34. {amati-0.2 → amati-0.2.2}/Dockerfile +0 -0
  35. {amati-0.2 → amati-0.2.2}/LICENSE +0 -0
  36. {amati-0.2 → amati-0.2.2}/TEMPLATE.html +0 -0
  37. {amati-0.2 → amati-0.2.2}/amati/__init__.py +0 -0
  38. {amati-0.2 → amati-0.2.2}/amati/_error_handler.py +0 -0
  39. {amati-0.2 → amati-0.2.2}/amati/_resolve_forward_references.py +0 -0
  40. {amati-0.2 → amati-0.2.2}/amati/data/http-status-codes.json +0 -0
  41. {amati-0.2 → amati-0.2.2}/amati/data/iso9110.json +0 -0
  42. {amati-0.2 → amati-0.2.2}/amati/data/media-types.json +0 -0
  43. {amati-0.2 → amati-0.2.2}/amati/data/schemes.json +0 -0
  44. {amati-0.2 → amati-0.2.2}/amati/data/spdx-licences.json +0 -0
  45. {amati-0.2 → amati-0.2.2}/amati/data/tlds.json +0 -0
  46. {amati-0.2 → amati-0.2.2}/amati/exceptions.py +0 -0
  47. {amati-0.2 → amati-0.2.2}/amati/fields/__init__.py +0 -0
  48. {amati-0.2 → amati-0.2.2}/amati/fields/_custom_types.py +0 -0
  49. {amati-0.2 → amati-0.2.2}/amati/fields/commonmark.py +0 -0
  50. {amati-0.2 → amati-0.2.2}/amati/fields/email.py +0 -0
  51. {amati-0.2 → amati-0.2.2}/amati/fields/http_status_codes.py +0 -0
  52. {amati-0.2 → amati-0.2.2}/amati/fields/iso9110.py +0 -0
  53. {amati-0.2 → amati-0.2.2}/amati/fields/json.py +0 -0
  54. {amati-0.2 → amati-0.2.2}/amati/fields/media.py +0 -0
  55. {amati-0.2 → amati-0.2.2}/amati/fields/oas.py +0 -0
  56. {amati-0.2 → amati-0.2.2}/amati/fields/spdx_licences.py +0 -0
  57. {amati-0.2 → amati-0.2.2}/amati/fields/uri.py +0 -0
  58. {amati-0.2 → amati-0.2.2}/amati/file_handler.py +0 -0
  59. {amati-0.2 → amati-0.2.2}/amati/grammars/oas.py +0 -0
  60. {amati-0.2 → amati-0.2.2}/amati/grammars/rfc6901.py +0 -0
  61. {amati-0.2 → amati-0.2.2}/amati/grammars/rfc7159.py +0 -0
  62. {amati-0.2 → amati-0.2.2}/amati/validators/__init__.py +0 -0
  63. {amati-0.2 → amati-0.2.2}/bin/startup.sh +0 -0
  64. {amati-0.2 → amati-0.2.2}/scripts/data/http_status_code.py +0 -0
  65. {amati-0.2 → amati-0.2.2}/scripts/data/iso9110.py +0 -0
  66. {amati-0.2 → amati-0.2.2}/scripts/data/media_types.py +0 -0
  67. {amati-0.2 → amati-0.2.2}/scripts/data/schemes.py +0 -0
  68. {amati-0.2 → amati-0.2.2}/scripts/data/spdx_licences.py +0 -0
  69. {amati-0.2 → amati-0.2.2}/scripts/data/tlds.py +0 -0
  70. {amati-0.2 → amati-0.2.2}/scripts/tests/setup_test_specs.py +0 -0
  71. {amati-0.2 → amati-0.2.2}/tests/__init__.py +0 -0
  72. {amati-0.2 → amati-0.2.2}/tests/data/.amati.tests.yaml +0 -0
  73. {amati-0.2 → amati-0.2.2}/tests/data/DigitalOcean-public.v2.errors.json +0 -0
  74. {amati-0.2 → amati-0.2.2}/tests/data/api.github.com.yaml.errors.json +0 -0
  75. {amati-0.2 → amati-0.2.2}/tests/data/next-api.github.com.yaml.errors.json +0 -0
  76. {amati-0.2 → amati-0.2.2}/tests/data/openapi.yaml +0 -0
  77. {amati-0.2 → amati-0.2.2}/tests/data/redocly.openapi.yaml.errors.json +0 -0
  78. {amati-0.2 → amati-0.2.2}/tests/fields/__init__.py +0 -0
  79. {amati-0.2 → amati-0.2.2}/tests/fields/test_email.py +0 -0
  80. {amati-0.2 → amati-0.2.2}/tests/fields/test_http_status_codes.py +0 -0
  81. {amati-0.2 → amati-0.2.2}/tests/fields/test_iso9110.py +0 -0
  82. {amati-0.2 → amati-0.2.2}/tests/fields/test_media.py +0 -0
  83. {amati-0.2 → amati-0.2.2}/tests/fields/test_oas.py +0 -0
  84. {amati-0.2 → amati-0.2.2}/tests/fields/test_spdx_licences.py +0 -0
  85. {amati-0.2 → amati-0.2.2}/tests/fields/test_uri.py +0 -0
  86. {amati-0.2 → amati-0.2.2}/tests/helpers.py +0 -0
  87. {amati-0.2 → amati-0.2.2}/tests/test_amati.py +0 -0
  88. {amati-0.2 → amati-0.2.2}/tests/test_external_specs.py +0 -0
  89. {amati-0.2 → amati-0.2.2}/tests/validators/__init__.py +0 -0
{amati-0.2 → amati-0.2.2}/.github/dependabot.yml
@@ -1,6 +1,5 @@
 version: 2
 updates:
-  # Enable version updates for npm
   - package-ecosystem: "uv"
     # Look for `uv.lock` file in the root directory.
     directory: "/"
@@ -8,7 +7,6 @@ updates:
     schedule:
       interval: "daily"
 
-  # Enable version updates for GitHub Actions
   - package-ecosystem: "github-actions"
     # Workflow files stored in the default location of `.github/workflows`
     # You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.
@@ -18,3 +16,9 @@ updates:
     allow:
       - dependency-type: "direct"
       - dependency-type: "indirect"
+
+  - package-ecosystem: "docker"
+    # Look for `Dockerfile` in the root directory
+    directory: "/"
+    schedule:
+      interval: "weekly"
amati-0.2.2/.github/workflows/checks.yaml
@@ -0,0 +1,90 @@
+name: Checks
+
+on:
+  pull_request:
+    branches: [ "main" ]
+
+permissions:
+  contents: read
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
+      contents: write
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: dorny/paths-filter@v3
+        id: check_changes
+        with:
+          filters: |
+            relevant:
+              - '**/*.py'
+              - '**/*.sh'
+              - '**/*.json'
+              - '**/*.html'
+              - '**/*.toml'
+              - '**/*.lock'
+              - 'tests/**/*.yaml'
+              - '.pylintrc'
+              - '.python-version'
+              - '.Dockerfile'
+
+      - name: Skip message
+        if: ${{ !(steps.check_changes.outputs.relevant == 'true') }}
+        run: echo "Skipping Python checks - no relevant changes detected"
+
+
+      - name: Install uv
+        if: steps.check_changes.outputs.relevant == 'true'
+        uses: astral-sh/setup-uv@v6
+
+      - name: Set up Python
+        if: steps.check_changes.outputs.relevant == 'true'
+        uses: actions/setup-python@v5
+        with:
+          python-version-file: ".python-version"
+
+      - name: Install the project
+        if: steps.check_changes.outputs.relevant == 'true'
+        run: uv sync --locked --all-extras --dev
+
+      - name: Formatting
+        if: steps.check_changes.outputs.relevant == 'true'
+        run: uv run black .
+
+      - name: Import sorting
+        if: steps.check_changes.outputs.relevant == 'true'
+        run: uv run isort .
+
+      - name: Linting
+        if: steps.check_changes.outputs.relevant == 'true'
+        run: uv run pylint amati
+
+      - name: Test Linting
+        if: steps.check_changes.outputs.relevant == 'true'
+        run: uv run pylint tests
+
+      - name: Testing
+        if: steps.check_changes.outputs.relevant == 'true'
+        run: uv run pytest -m"not external" --cov
+
+      - name: Doctests
+        if: steps.check_changes.outputs.relevant == 'true'
+        run: uv run pytest --doctest-modules amati/
+
+      - name: Coverage comment
+        if: steps.check_changes.outputs.relevant == 'true'
+        id: coverage_comment
+        uses: py-cov-action/python-coverage-comment-action@v3
+        with:
+          GITHUB_TOKEN: ${{ github.token }}
+
+      - name: Store Pull Request comment to be posted
+        if: steps.check_changes.outputs.run_python_checks == 'true'
+        uses: actions/upload-artifact@v4
+        with:
+          name: python-coverage-comment-action
+          path: python-coverage-comment-action.txt
amati-0.2.2/.github/workflows/publish.yaml
@@ -0,0 +1,47 @@
+name: Publish to PyPI
+
+on:
+  release:
+    types: [published]
+
+
+jobs:
+  run:
+    name: "Build and publish release"
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # Required for OIDC authentication
+      contents: read
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install uv
+        uses: astral-sh/setup-uv@v6
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version-file: ".python-version"
+      - name: Build
+        run: uv build
+      - name: Publish to PyPI test
+        run: uv publish --index testpypi
+      - name: Publish
+        run: uv publish
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Log in to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USER }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          push: true
+          tags: |
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ secrets.DOCKERHUB_REPO }}:${{ github.event.release.tag_name }}.1
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ secrets.DOCKERHUB_REPO }}:alpha
amati-0.2.2/.python-version
@@ -0,0 +1 @@
+3.13.5
{amati-0.2 → amati-0.2.2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: amati
-Version: 0.2
+Version: 0.2.2
 Summary: Validates that a .yaml or .json file conforms to the OpenAPI Specifications 3.x.
 Project-URL: Homepage, https://github.com/ben-alexander/amati
 Project-URL: Issues, https://github.com/ben-alexander/amati/issues
@@ -28,7 +28,7 @@ amati is designed to validate that a file conforms to the [OpenAPI Specification
 
 ## Name
 
-amati means to observe in Malay, especially with attention to detail. It's also one of the plurals of beloved or favourite in Italian.
+"amati" means to observe in Malay, especially with attention to detail. It's also one of the plurals of beloved or favourite in Italian.
 
 ## Usage
 
@@ -59,34 +59,47 @@ A Dockerfile is available on [DockerHub](https://hub.docker.com/r/benale/amati/t
 To run against a specific specification the location of the specification needs to be mounted in the container.
 
 ```sh
-docker run -v "<path-to-mount>:/<mount-name> amati <options>
+docker run -v "<path-to-mount>:/<mount-name> amati:alpha <options>
 ```
 
 e.g.
 
 ```sh
-docker run -v /Users/myuser/myrepo:/data amati --spec data/myspec.yaml --hr
+docker run -v /Users/myuser/myrepo:/data amati:alpha --spec data/myspec.yaml --hr
 ```
 
 ## Architecture
 
-This uses Pydantic, especially the validation, and Typing to construct the entire OAS as a single data type. Passing a dictionary to the top-level data type runs all the validation in the Pydantic models constructing a single set of inherited classes and datatypes that validate that the API specification is accurate.
+amati uses Pydantic, especially the validation, and Typing to construct the entire OAS as a single data type. Passing a dictionary to the top-level data type runs all the validation in the Pydantic models constructing a single set of inherited classes and datatypes that validate that the API specification is accurate. To the extent that Pydantic is functional, amati has a [functional core and an imperative shell](https://www.destroyallsoftware.com/screencasts/catalog/functional-core-imperative-shell).
 
 Where the specification conforms, but relies on implementation-defined behavior (e.g. [data type formats](https://spec.openapis.org/oas/v3.1.1.html#data-type-format)), a warning will be raised.
 
 ## Contributing
 
-### Requirements
+### Prerequisites
 
 * The latest version of [uv](https://docs.astral.sh/uv/)
 * [git 2.49+](https://git-scm.com/downloads/linux)
+* [Docker](https://docs.docker.com/engine/install/)
+
+### Starting
+
+The project uses a [`pyproject.toml` file](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#writing-pyproject-toml) to determine what to build.
+
+To get started run:
+
+```sh
+uv python install
+uv venv
+uv sync
+```
 
 ### Testing and formatting
 
 This project uses:
 
 * [Pytest](https://docs.pytest.org/en/stable/) as a testing framework
-* [PyLance](https://marketplace.visualstudio.com/items?itemName=ms-python.vscode-pylance) on strict mode for type checking
+* [Pyright](https://microsoft.github.io/pyright/#/) on strict mode for type checking
 * [Pylint](https://www.pylint.org/) as a linter, using a modified version from [Google's style guide](https://google.github.io/styleguide/pyguide.html)
 * [Hypothesis](https://hypothesis.readthedocs.io/en/latest/index.html) for test data generation
 * [Coverage](https://coverage.readthedocs.io/en/7.6.8/) on both the tests and code for test coverage
@@ -94,33 +107,20 @@ This project uses:
 * [isort](https://pycqa.github.io/isort/) for import sorting
 
 It's expected that there are no errors and 100% of the code is reached and executed. The strategy for test coverage is based on parsing test specifications and not unit tests.
-
-amati runs tests on external specifications, detailed in `tests/data/.amati.tests.yaml`. To be able to run these tests the appropriate GitHub repos need to be local. Specific revisions of the repos can be downloaded by running
+amati runs tests on the external specifications, detailed in `tests/data/.amati.tests.yaml`. To be able to run these tests the GitHub repos containing the specifications need to be available locally. Specific revisions of the repos can be downloaded by running the following, which will clone the repos into `../amati-tests-specs/<repo-name>`.
 
 ```sh
 python scripts/tests/setup_test_specs.py
 ```
 
+If there are some issues with the specification a JSON file detailing those should be placed into `tests/data/` and the name of that file noted in `tests/data/.amati.tests.yaml` for the test suite to pick it up and check that the errors are expected. Any specifications that close the coverage gap are gratefully received.
+
 To run everything, from linting, type checking to downloading test specs and building and testing the Docker image run:
 
 ```sh
 sh bin/checks.sh
 ```
 
-You will need to have Docker installed.
-
-### Building
-
-The project uses a [`pyproject.toml` file](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#writing-pyproject-toml) to determine what to build.
-
-To install, assuming that [uv](https://docs.astral.sh/uv/) is already installed and initialised
-
-```sh
-uv python install
-uv venv
-uv sync
-```
-
 ### Docker
 
 A development Docker image is provided, `Dockerfile.dev`, to build:
@@ -129,7 +129,7 @@ A development Docker image is provided, `Dockerfile.dev`, to build:
 docker build -t amati -f Dockerfile .
 ```
 
-and to run against a specific specification the location of the specification needs to be mounted in the container.
+to run against a specific specification the location of the specification needs to be mounted in the container.
 
 ```sh
 docker run -v "<path-to-mount>:/<mount-name> amati <options>
@@ -138,13 +138,13 @@ docker run -v "<path-to-mount>:/<mount-name> amati <options>
 This can be tested against a provided specification, from the root directory
 
 ```sh
-docker run --detach -v "$(pwd):/data" amati
+docker run --detach -v "$(pwd):/data" amati <options>
 ```
 
 
 ### Data
 
-There are some scripts to create the data needed by the project, for example, all the possible licences. If the data needs to be refreshed this can be done by running the contents of `/scripts/data`.
+There are some scripts to create the data needed by the project, for example, all the registered TLDs. To refresh the data, run the contents of `/scripts/data`.
 
 
 
{amati-0.2 → amati-0.2.2}/README.md
@@ -4,7 +4,7 @@ amati is designed to validate that a file conforms to the [OpenAPI Specification
 
 ## Name
 
-amati means to observe in Malay, especially with attention to detail. It's also one of the plurals of beloved or favourite in Italian.
+"amati" means to observe in Malay, especially with attention to detail. It's also one of the plurals of beloved or favourite in Italian.
 
 ## Usage
 
@@ -35,34 +35,47 @@ A Dockerfile is available on [DockerHub](https://hub.docker.com/r/benale/amati/t
 To run against a specific specification the location of the specification needs to be mounted in the container.
 
 ```sh
-docker run -v "<path-to-mount>:/<mount-name> amati <options>
+docker run -v "<path-to-mount>:/<mount-name> amati:alpha <options>
 ```
 
 e.g.
 
 ```sh
-docker run -v /Users/myuser/myrepo:/data amati --spec data/myspec.yaml --hr
+docker run -v /Users/myuser/myrepo:/data amati:alpha --spec data/myspec.yaml --hr
 ```
 
 ## Architecture
 
-This uses Pydantic, especially the validation, and Typing to construct the entire OAS as a single data type. Passing a dictionary to the top-level data type runs all the validation in the Pydantic models constructing a single set of inherited classes and datatypes that validate that the API specification is accurate.
+amati uses Pydantic, especially the validation, and Typing to construct the entire OAS as a single data type. Passing a dictionary to the top-level data type runs all the validation in the Pydantic models constructing a single set of inherited classes and datatypes that validate that the API specification is accurate. To the extent that Pydantic is functional, amati has a [functional core and an imperative shell](https://www.destroyallsoftware.com/screencasts/catalog/functional-core-imperative-shell).
 
 Where the specification conforms, but relies on implementation-defined behavior (e.g. [data type formats](https://spec.openapis.org/oas/v3.1.1.html#data-type-format)), a warning will be raised.
 
 ## Contributing
 
-### Requirements
+### Prerequisites
 
 * The latest version of [uv](https://docs.astral.sh/uv/)
 * [git 2.49+](https://git-scm.com/downloads/linux)
+* [Docker](https://docs.docker.com/engine/install/)
+
+### Starting
+
+The project uses a [`pyproject.toml` file](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#writing-pyproject-toml) to determine what to build.
+
+To get started run:
+
+```sh
+uv python install
+uv venv
+uv sync
+```
 
 ### Testing and formatting
 
 This project uses:
 
 * [Pytest](https://docs.pytest.org/en/stable/) as a testing framework
-* [PyLance](https://marketplace.visualstudio.com/items?itemName=ms-python.vscode-pylance) on strict mode for type checking
+* [Pyright](https://microsoft.github.io/pyright/#/) on strict mode for type checking
 * [Pylint](https://www.pylint.org/) as a linter, using a modified version from [Google's style guide](https://google.github.io/styleguide/pyguide.html)
 * [Hypothesis](https://hypothesis.readthedocs.io/en/latest/index.html) for test data generation
 * [Coverage](https://coverage.readthedocs.io/en/7.6.8/) on both the tests and code for test coverage
@@ -70,33 +83,20 @@ This project uses:
 * [isort](https://pycqa.github.io/isort/) for import sorting
 
 It's expected that there are no errors and 100% of the code is reached and executed. The strategy for test coverage is based on parsing test specifications and not unit tests.
-
-amati runs tests on external specifications, detailed in `tests/data/.amati.tests.yaml`. To be able to run these tests the appropriate GitHub repos need to be local. Specific revisions of the repos can be downloaded by running
+amati runs tests on the external specifications, detailed in `tests/data/.amati.tests.yaml`. To be able to run these tests the GitHub repos containing the specifications need to be available locally. Specific revisions of the repos can be downloaded by running the following, which will clone the repos into `../amati-tests-specs/<repo-name>`.
 
 ```sh
 python scripts/tests/setup_test_specs.py
 ```
 
+If there are some issues with the specification a JSON file detailing those should be placed into `tests/data/` and the name of that file noted in `tests/data/.amati.tests.yaml` for the test suite to pick it up and check that the errors are expected. Any specifications that close the coverage gap are gratefully received.
+
 To run everything, from linting, type checking to downloading test specs and building and testing the Docker image run:
 
 ```sh
 sh bin/checks.sh
 ```
 
-You will need to have Docker installed.
-
-### Building
-
-The project uses a [`pyproject.toml` file](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#writing-pyproject-toml) to determine what to build.
-
-To install, assuming that [uv](https://docs.astral.sh/uv/) is already installed and initialised
-
-```sh
-uv python install
-uv venv
-uv sync
-```
-
 ### Docker
 
 A development Docker image is provided, `Dockerfile.dev`, to build:
@@ -105,7 +105,7 @@ A development Docker image is provided, `Dockerfile.dev`, to build:
 docker build -t amati -f Dockerfile .
 ```
 
-and to run against a specific specification the location of the specification needs to be mounted in the container.
+to run against a specific specification the location of the specification needs to be mounted in the container.
 
 ```sh
 docker run -v "<path-to-mount>:/<mount-name> amati <options>
@@ -114,13 +114,13 @@ docker run -v "<path-to-mount>:/<mount-name> amati <options>
 This can be tested against a provided specification, from the root directory
 
 ```sh
-docker run --detach -v "$(pwd):/data" amati
+docker run --detach -v "$(pwd):/data" amati <options>
 ```
 
 
 ### Data
 
-There are some scripts to create the data needed by the project, for example, all the possible licences. If the data needs to be refreshed this can be done by running the contents of `/scripts/data`.
+There are some scripts to create the data needed by the project, for example, all the registered TLDs. To refresh the data, run the contents of `/scripts/data`.
 
 
 
{amati-0.2 → amati-0.2.2}/amati/amati.py
@@ -16,7 +16,7 @@ sys.path.insert(0, str(Path(__file__).parent.parent))
 from amati._error_handler import handle_errors
 from amati._resolve_forward_references import resolve_forward_references
 from amati.file_handler import load_file
-from amati.logging import Log, LogMixin
+from amati.logging import Log, Logger
 
 type JSONPrimitive = str | int | float | bool | None
 type JSONArray = list["JSONValue"]
@@ -113,9 +113,9 @@ def run(
 
     logs: list[Log] = []
 
-    with LogMixin.context():
+    with Logger.context():
         result, errors = dispatch(data)
-        logs.extend(LogMixin.logs)
+        logs.extend(Logger.logs)
 
     if errors or logs:
 
@@ -158,19 +158,38 @@ def run(
     if result and consistency_check:
         return check(data, result)
 
+    return True
 
-def discover(discover_dir: str = ".") -> list[Path]:
+
+def discover(spec: str, discover_dir: str = ".") -> list[Path]:
     """
     Finds OpenAPI Specification files to validate
 
     Args:
+        spec: The path to a specific OpenAPI specification file.
        discover_dir: The directory to search through.
     Returns:
-        A list of paths to validate.
+        A list of specifications to validate.
     """
 
     specs: list[Path] = []
 
+    # If a spec is provided, check if it exists and erorr if not
+    if spec:
+        spec_path = Path(spec)
+
+        if not spec_path.exists():
+            raise FileNotFoundError(f"File {spec} does not exist.")
+
+        if not spec_path.is_file():
+            raise IsADirectoryError(f"{spec} is a directory, not a file.")
+
+        specs.append(spec_path)
+
+    # End early if we're not also trying to find files
+    if not discover_dir:
+        return specs
+
     if Path("openapi.json").exists():
         specs.append(Path("openapi.json"))
 
@@ -258,16 +277,20 @@ if __name__ == "__main__":
     )
 
     args = parser.parse_args()
+
+    print('Starting amati...')
 
-    if args.spec:
-        specifications: list[Path] = [Path(args.spec)]
-    else:
-        specifications = discover(args.discover)
+    specifications = discover(args.spec, args.discover)
+    print(specifications)
 
     for specification in specifications:
-        if successful_check := run(
+        successful_check = run(
             specification, args.consistency_check, args.local, args.html_report
-        ):
-            print("Consistency check successful for {specification}")
-        else:
-            print("Consistency check failed for {specification}")
+        )
+
+        if args.consistency_check and successful_check:
+            print(f"Consistency check successful for {specification}")
+        elif args.consistency_check:
+            print(f"Consistency check failed for {specification}")
+
+    print('completed.')
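The reworked `discover` above now takes the explicit `--spec` path as its first argument, validates it, and only falls back to directory discovery when a discover directory is supplied. The sketch below illustrates that precedence; the `amati.amati` import path is assumed from the file layout, and this is not part of the package's own test suite.

```python
# Illustrative check of the precedence implemented by the new discover():
# an explicit spec path is validated and returned, and a missing path raises.
# Import path assumed from the file layout; not amati's own test code.
from pathlib import Path

import pytest

from amati.amati import discover


def test_explicit_spec_is_returned(tmp_path: Path) -> None:
    spec = tmp_path / "openapi.yaml"
    spec.write_text("openapi: 3.1.1\n")
    # An empty discover_dir stops the function before directory discovery.
    assert discover(str(spec), discover_dir="") == [spec]


def test_missing_spec_raises() -> None:
    with pytest.raises(FileNotFoundError):
        discover("does-not-exist.yaml", discover_dir="")
```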
{amati-0.2 → amati-0.2.2}/amati/logging.py
@@ -18,7 +18,7 @@ class Log(TypedDict):
     url: NotRequired[str]
 
 
-class LogMixin:
+class Logger:
     """
     A mixin class that provides logging functionality.
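The `LogMixin` → `Logger` rename pairs with the `with Logger.context(): ... Logger.logs` usage shown in the `amati.py` hunk above. The sketch below illustrates that interface only; it is not the package's implementation, and every field of `Log` other than `url` is an assumption.

```python
# Interface sketch for the renamed Logger: collect Log entries inside a
# class-level context and read them back afterwards, mirroring how amati.py
# uses it in the diff above. Not amati's implementation; Log fields other
# than `url` are assumptions.
from contextlib import contextmanager
from typing import Iterator, NotRequired, TypedDict


class Log(TypedDict):
    message: str                # assumed field
    url: NotRequired[str]       # field visible in the diff


class Logger:
    logs: list[Log] = []

    @classmethod
    @contextmanager
    def context(cls) -> Iterator[None]:
        cls.logs = []           # fresh collection for this validation run
        yield                   # logs remain readable after the block exits

    @classmethod
    def log(cls, entry: Log) -> None:
        cls.logs.append(entry)


with Logger.context():
    Logger.log({"message": "example warning", "url": "https://example.com"})

print(Logger.logs)
```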