amati-0.2-py3-none-any.whl → amati-0.2.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
amati/amati.py CHANGED
@@ -16,7 +16,7 @@ sys.path.insert(0, str(Path(__file__).parent.parent))
  from amati._error_handler import handle_errors
  from amati._resolve_forward_references import resolve_forward_references
  from amati.file_handler import load_file
- from amati.logging import Log, LogMixin
+ from amati.logging import Log, Logger
 
  type JSONPrimitive = str | int | float | bool | None
  type JSONArray = list["JSONValue"]
@@ -113,9 +113,9 @@ def run(
 
  logs: list[Log] = []
 
- with LogMixin.context():
+ with Logger.context():
  result, errors = dispatch(data)
- logs.extend(LogMixin.logs)
+ logs.extend(Logger.logs)
 
  if errors or logs:
 
@@ -158,19 +158,38 @@ def run(
  if result and consistency_check:
  return check(data, result)
 
+ return True
 
- def discover(discover_dir: str = ".") -> list[Path]:
+
+ def discover(spec: str, discover_dir: str = ".") -> list[Path]:
  """
  Finds OpenAPI Specification files to validate
 
  Args:
+ spec: The path to a specific OpenAPI specification file.
  discover_dir: The directory to search through.
  Returns:
- A list of paths to validate.
+ A list of specifications to validate.
  """
 
  specs: list[Path] = []
 
+ # If a spec is provided, check if it exists and erorr if not
+ if spec:
+ spec_path = Path(spec)
+
+ if not spec_path.exists():
+ raise FileNotFoundError(f"File {spec} does not exist.")
+
+ if not spec_path.is_file():
+ raise IsADirectoryError(f"{spec} is a directory, not a file.")
+
+ specs.append(spec_path)
+
+ # End early if we're not also trying to find files
+ if not discover_dir:
+ return specs
+
  if Path("openapi.json").exists():
  specs.append(Path("openapi.json"))
 
@@ -258,16 +277,20 @@ if __name__ == "__main__":
  )
 
  args = parser.parse_args()
+
+ print('Starting amati...')
 
- if args.spec:
- specifications: list[Path] = [Path(args.spec)]
- else:
- specifications = discover(args.discover)
+ specifications = discover(args.spec, args.discover)
+ print(specifications)
 
  for specification in specifications:
- if successful_check := run(
+ successful_check = run(
  specification, args.consistency_check, args.local, args.html_report
- ):
- print("Consistency check successful for {specification}")
- else:
- print("Consistency check failed for {specification}")
+ )
+
+ if args.consistency_check and successful_check:
+ print(f"Consistency check successful for {specification}")
+ elif args.consistency_check:
+ print(f"Consistency check failed for {specification}")
+
+ print('completed.')
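For orientation, the reworked `discover` above now takes both an explicit specification path and a directory to search, and the `__main__` block always calls it with `args.spec` and `args.discover`. A minimal usage sketch, assuming the module imports as `amati.amati` (its path in this wheel) and using a hypothetical file name:

```python
from pathlib import Path

from amati.amati import discover

# Hypothetical file name: the explicit spec is checked first, raising
# FileNotFoundError or IsADirectoryError if it is missing or is a directory,
# and the directory is then searched as well.
specs: list[Path] = discover("myspec.yaml", ".")

# An empty spec skips the explicit-file branch and only searches the directory.
found: list[Path] = discover("", discover_dir=".")

print(specs, found)
```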
amati/logging.py CHANGED
@@ -18,7 +18,7 @@ class Log(TypedDict):
  url: NotRequired[str]
 
 
- class LogMixin:
+ class Logger:
  """
  A mixin class that provides logging functionality.
 
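The rename from `LogMixin` to `Logger` keeps the class-level collection pattern used throughout the rest of this diff: validators append entries via `Logger.log()`, and callers read `Logger.logs` inside `Logger.context()`. A hedged sketch of that pattern with a hand-built entry (real entries in this diff can also carry a reference `url`):

```python
from amati.logging import Log, Logger

collected: list[Log] = []

# Inside the context, anything logged via Logger.log() accumulates on
# Logger.logs; the caller copies it out, as run() in amati/amati.py does above.
with Logger.context():
    Logger.log({"msg": "example message", "type": "value_error"})
    collected.extend(Logger.logs)

print(collected)
```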
amati/model_validators.py CHANGED
@@ -10,7 +10,7 @@ from pydantic._internal._decorators import (
  PydanticDescriptorProxy,
  )
 
- from amati.logging import LogMixin
+ from amati.logging import Logger
  from amati.validators.generic import GenericObject
 
 
@@ -107,7 +107,7 @@ def at_least_one_of(
  The validator that ensures at least one public field is non-empty.
 
  Example:
- >>> LogMixin.logs = []
+ >>> Logger.logs = []
  >>>
  >>> class User(GenericObject):
  ... name: str = ""
@@ -116,8 +116,8 @@ def at_least_one_of(
  ... _reference_uri = "https://example.com"
  ...
  >>> user = User()
- >>> assert len(LogMixin.logs) == 1
- >>> LogMixin.logs = []
+ >>> assert len(Logger.logs) == 1
+ >>> Logger.logs = []
 
  >>> class User(GenericObject):
  ... name: str = ""
@@ -128,11 +128,11 @@ def at_least_one_of(
  ...
  >>>
  >>> user = User(name="John") # Works fine
- >>> assert not LogMixin.logs
+ >>> assert not Logger.logs
  >>> user = User()
- >>> assert len(LogMixin.logs) == 1
+ >>> assert len(Logger.logs) == 1
  >>> user = User(age=30)
- >>> assert len(LogMixin.logs) == 2
+ >>> assert len(Logger.logs) == 2
 
 
  Note:
@@ -157,7 +157,7 @@ def at_least_one_of(
  public_fields = ", ".join(f"{name}" for name in candidates.keys())
 
  msg = f"{public_fields} do not have values, expected at least one."
- LogMixin.log(
+ Logger.log(
  {
  "msg": msg,
  "type": "value_error",
@@ -191,7 +191,7 @@ def only_one_of(
  The validator that ensures at one public field is non-empty.
 
  Example:
- >>> LogMixin.logs = []
+ >>> Logger.logs = []
  >>>
  >>> class User(GenericObject):
  ... email: str = ""
@@ -201,10 +201,10 @@ def only_one_of(
  ...
  >>> user = User(email="test@example.com") # Works fine
  >>> user = User(name="123-456-7890") # Works fine
- >>> assert not LogMixin.logs
+ >>> assert not Logger.logs
  >>> user = User(email="a@b.com", name="123")
- >>> assert LogMixin.logs
- >>> LogMixin.logs = []
+ >>> assert Logger.logs
+ >>> Logger.logs = []
 
  >>> class User(GenericObject):
  ... name: str = ""
@@ -216,11 +216,11 @@ def only_one_of(
  >>> user = User(name="Bob") # Works fine
  >>> user = User(email="test@example.com") # Works fine
  >>> user = User(name="Bob", age=30) # Works fine
- >>> assert not LogMixin.logs
+ >>> assert not Logger.logs
  >>> user = User(name="Bob", email="a@b.com")
- >>> assert len(LogMixin.logs) == 1
+ >>> assert len(Logger.logs) == 1
  >>> user = User(age=30)
- >>> assert len(LogMixin.logs) == 2
+ >>> assert len(Logger.logs) == 2
 
  Note:
  Only public fields (not starting with '_') are checked. Private fields
@@ -249,7 +249,7 @@ def only_one_of(
  field_string = "none"
  msg = f"Expected at most one field to have a value, {field_string} did"
 
- LogMixin.log(
+ Logger.log(
  {
  "msg": msg,
  "type": type_ or "value_error",
@@ -282,7 +282,7 @@ def all_of(
  The validator that ensures at most one public field is non-empty.
 
  Example:
- >>> LogMixin.logs = []
+ >>> Logger.logs = []
  >>>
  >>> class User(GenericObject):
  ... email: str = ""
@@ -291,11 +291,11 @@ def all_of(
  ... _reference_uri = "https://example.com"
  ...
  >>> user = User(email="a@b.com", name="123") # Works fine
- >>> assert not LogMixin.logs
+ >>> assert not Logger.logs
  >>> user = User(email="test@example.com")
- >>> assert len(LogMixin.logs) == 1
+ >>> assert len(Logger.logs) == 1
  >>> user = User(name="123-456-7890")
- >>> assert len(LogMixin.logs) == 2
+ >>> assert len(Logger.logs) == 2
 
  >>> class User(GenericObject):
  ... name: str = ""
@@ -304,17 +304,17 @@ def all_of(
  ... _all_of = all_of(["name", "email"])
  ... _reference_uri = "https://example.com"
  ...
- >>> LogMixin.logs = []
+ >>> Logger.logs = []
  >>> user = User(name="Bob", email="a@b.com") # Works fine
- >>> assert not LogMixin.logs
+ >>> assert not Logger.logs
  >>> user = User(name="Bob")
- >>> assert len(LogMixin.logs) == 1
+ >>> assert len(Logger.logs) == 1
  >>> user = User(email="test@example.com")
- >>> assert len(LogMixin.logs) == 2
+ >>> assert len(Logger.logs) == 2
  >>> user = User(age=30)
- >>> assert len(LogMixin.logs) == 3
+ >>> assert len(Logger.logs) == 3
  >>> user = User(name="Bob", age=30)
- >>> assert len(LogMixin.logs) == 4
+ >>> assert len(Logger.logs) == 4
 
  Note:
  Only public fields (not starting with '_') are checked. Private fields
@@ -339,7 +339,7 @@ def all_of(
  if falsy:
  msg = f"Expected at all fields to have a value, {", ".join(falsy)} did not"
 
- LogMixin.log(
+ Logger.log(
  {
  "msg": msg,
  "type": "value_error",
@@ -378,7 +378,7 @@ def if_then(
  ValueError: If a condition and consequence are not present
 
  Example:
- >>> LogMixin.logs = []
+ >>> Logger.logs = []
  >>>
  >>> class User(GenericObject):
  ... role: str = ""
@@ -390,11 +390,11 @@ def if_then(
  ... _reference_uri = "https://example.com"
  ...
  >>> user = User(role="admin", can_edit=True) # Works fine
- >>> assert not LogMixin.logs
+ >>> assert not Logger.logs
  >>> user = User(role="admin", can_edit=False) # Fails validation
- >>> assert len(LogMixin.logs) == 1
+ >>> assert len(Logger.logs) == 1
  >>> user = User(role="user", can_edit=False) # Works fine
- >>> assert len(LogMixin.logs) == 1
+ >>> assert len(Logger.logs) == 1
  """
 
  @model_validator(mode="after")
@@ -431,7 +431,7 @@ def if_then(
  if value == actual:
  continue
 
- LogMixin.log(
+ Logger.log(
  {
  "msg": f"Expected {field} to be {"in " if iterable else ""}"
  f"{value} found {actual}",
amati/validators/generic.py CHANGED
@@ -20,7 +20,7 @@ from typing import (
  from pydantic import BaseModel, ConfigDict, PrivateAttr
  from pydantic_core._pydantic_core import PydanticUndefined
 
- from amati.logging import LogMixin
+ from amati.logging import Logger
 
 
  class GenericObject(BaseModel):
@@ -47,7 +47,7 @@ class GenericObject(BaseModel):
  and field not in self.get_field_aliases()
  ):
  message = f"{field} is not a valid field for {self.__repr_name__()}."
- LogMixin.log(
+ Logger.log(
  {
  "msg": message,
  "type": "value_error",
@@ -79,7 +79,7 @@ class GenericObject(BaseModel):
 
  for field in excess_fields:
  message = f"{field} is not a valid field for {self.__repr_name__()}."
- LogMixin.log(
+ Logger.log(
  {
  "msg": message,
  "type": "value_error",
amati/validators/oas304.py CHANGED
@@ -42,7 +42,7 @@ from amati.fields import (
  from amati.fields.commonmark import CommonMark
  from amati.fields.json import JSON
  from amati.fields.oas import OpenAPI, RuntimeExpression
- from amati.logging import LogMixin
+ from amati.logging import Logger
  from amati.validators.generic import GenericObject, allow_extra_fields
 
  type JSONPrimitive = str | int | float | bool | None
@@ -179,7 +179,7 @@ class ServerVariableObject(GenericObject):
  return self
 
  if self.default not in self.enum:
- LogMixin.log(
+ Logger.log(
  {
  "msg": f"The default value {self.default} is not in the enum list {self.enum}", # pylint: disable=line-too-long
  "type": "warning",
@@ -647,7 +647,7 @@ class XMLObject(GenericObject):
  """
  if value.type == URIType.RELATIVE:
  message = "XML namespace {value} cannot be a relative URI"
- LogMixin.log(
+ Logger.log(
  {
  "msg": message,
  "type": "value_error",
@@ -726,7 +726,7 @@ class SchemaObject(GenericObject):
  # This will validate the structure conforms to JSON Schema
  validator_cls(meta_schema).validate(schema_dict) # type: ignore
  except JSONVSchemeValidationError as e:
- LogMixin.log(
+ Logger.log(
  {
  "msg": f"Invalid JSON Schema: {e.message}",
  "type": "value_error",
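As a quick illustration of the `ServerVariableObject` hunk above, the check only logs when the default falls outside the enum. A hedged sketch (field names are taken from the hunk; constructing the object this way is an assumption):

```python
from amati.logging import Logger
from amati.validators.oas304 import ServerVariableObject

with Logger.context():
    # "gamma" is not in the enum list, so the validator above records a
    # warning-level Log entry rather than raising.
    ServerVariableObject(enum=["alpha", "beta"], default="gamma")
    print(Logger.logs)
```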
amati/validators/oas311.py CHANGED
@@ -37,7 +37,7 @@ from amati.fields.commonmark import CommonMark
  from amati.fields.json import JSON
  from amati.fields.oas import OpenAPI
  from amati.fields.spdx_licences import VALID_LICENCES
- from amati.logging import LogMixin
+ from amati.logging import Logger
  from amati.validators.generic import GenericObject, allow_extra_fields
  from amati.validators.oas304 import (
  CallbackObject,
@@ -105,7 +105,7 @@ class LicenceObject(GenericObject):
  try:
  SPDXURL(self.url)
  except AmatiValueError:
- LogMixin.log(
+ Logger.log(
  {
  "msg": f"{str(self.url)} is not a valid SPDX URL",
  "type": "warning",
@@ -122,7 +122,7 @@ class LicenceObject(GenericObject):
  and self.identifier
  and str(self.url) not in VALID_LICENCES[self.identifier]
  ):
- LogMixin.log(
+ Logger.log(
  {
  "msg": f"{self.url} is not associated with the identifier {self.identifier}", # pylint: disable=line-too-long
  "type": "warning",
@@ -214,7 +214,7 @@ class ServerVariableObject(GenericObject):
  return self
 
  if self.default not in self.enum:
- LogMixin.log(
+ Logger.log(
  {
  "msg": f"The default value {self.default} is not in the enum list {self.enum}", # pylint: disable=line-too-long
  "type": "value_error",
@@ -396,7 +396,7 @@ class SchemaObject(GenericObject):
  # This will validate the structure conforms to JSON Schema
  validator_cls(meta_schema).validate(schema_dict) # type: ignore
  except JSONVSchemeValidationError as e:
- LogMixin.log(
+ Logger.log(
  {
  "msg": f"Invalid JSON Schema: {e.message}",
  "type": "value_error",
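Similarly for the `LicenceObject` hunks above, both SPDX checks log warnings instead of failing validation. A hedged sketch (the `url` and `identifier` fields appear in the hunks; the `name` field is assumed from the OpenAPI Licence Object):

```python
from amati.logging import Logger
from amati.validators.oas311 import LicenceObject

with Logger.context():
    # The URL is not an SPDX licence page and is not associated with the MIT
    # identifier, so warning entries are logged rather than errors raised.
    LicenceObject(name="MIT", identifier="MIT", url="https://example.com/licence")
    print(Logger.logs)
```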
amati-0.2.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: amati
- Version: 0.2
+ Version: 0.2.2
  Summary: Validates that a .yaml or .json file conforms to the OpenAPI Specifications 3.x.
  Project-URL: Homepage, https://github.com/ben-alexander/amati
  Project-URL: Issues, https://github.com/ben-alexander/amati/issues
@@ -28,7 +28,7 @@ amati is designed to validate that a file conforms to the [OpenAPI Specification
 
  ## Name
 
- amati means to observe in Malay, especially with attention to detail. It's also one of the plurals of beloved or favourite in Italian.
+ "amati" means to observe in Malay, especially with attention to detail. It's also one of the plurals of beloved or favourite in Italian.
 
  ## Usage
 
@@ -59,34 +59,47 @@ A Dockerfile is available on [DockerHub](https://hub.docker.com/r/benale/amati/t
  To run against a specific specification the location of the specification needs to be mounted in the container.
 
  ```sh
- docker run -v "<path-to-mount>:/<mount-name> amati <options>
+ docker run -v "<path-to-mount>:/<mount-name> amati:alpha <options>
  ```
 
  e.g.
 
  ```sh
- docker run -v /Users/myuser/myrepo:/data amati --spec data/myspec.yaml --hr
+ docker run -v /Users/myuser/myrepo:/data amati:alpha --spec data/myspec.yaml --hr
  ```
 
  ## Architecture
 
- This uses Pydantic, especially the validation, and Typing to construct the entire OAS as a single data type. Passing a dictionary to the top-level data type runs all the validation in the Pydantic models constructing a single set of inherited classes and datatypes that validate that the API specification is accurate.
+ amati uses Pydantic, especially the validation, and Typing to construct the entire OAS as a single data type. Passing a dictionary to the top-level data type runs all the validation in the Pydantic models constructing a single set of inherited classes and datatypes that validate that the API specification is accurate. To the extent that Pydantic is functional, amati has a [functional core and an imperative shell](https://www.destroyallsoftware.com/screencasts/catalog/functional-core-imperative-shell).
 
  Where the specification conforms, but relies on implementation-defined behavior (e.g. [data type formats](https://spec.openapis.org/oas/v3.1.1.html#data-type-format)), a warning will be raised.
 
  ## Contributing
 
- ### Requirements
+ ### Prerequisites
 
  * The latest version of [uv](https://docs.astral.sh/uv/)
  * [git 2.49+](https://git-scm.com/downloads/linux)
+ * [Docker](https://docs.docker.com/engine/install/)
+
+ ### Starting
+
+ The project uses a [`pyproject.toml` file](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#writing-pyproject-toml) to determine what to build.
+
+ To get started run:
+
+ ```sh
+ uv python install
+ uv venv
+ uv sync
+ ```
 
  ### Testing and formatting
 
  This project uses:
 
  * [Pytest](https://docs.pytest.org/en/stable/) as a testing framework
- * [PyLance](https://marketplace.visualstudio.com/items?itemName=ms-python.vscode-pylance) on strict mode for type checking
+ * [Pyright](https://microsoft.github.io/pyright/#/) on strict mode for type checking
  * [Pylint](https://www.pylint.org/) as a linter, using a modified version from [Google's style guide](https://google.github.io/styleguide/pyguide.html)
  * [Hypothesis](https://hypothesis.readthedocs.io/en/latest/index.html) for test data generation
  * [Coverage](https://coverage.readthedocs.io/en/7.6.8/) on both the tests and code for test coverage
@@ -94,33 +107,20 @@ This project uses:
  * [isort](https://pycqa.github.io/isort/) for import sorting
 
  It's expected that there are no errors and 100% of the code is reached and executed. The strategy for test coverage is based on parsing test specifications and not unit tests.
-
- amati runs tests on external specifications, detailed in `tests/data/.amati.tests.yaml`. To be able to run these tests the appropriate GitHub repos need to be local. Specific revisions of the repos can be downloaded by running
+ amati runs tests on the external specifications, detailed in `tests/data/.amati.tests.yaml`. To be able to run these tests the GitHub repos containing the specifications need to be available locally. Specific revisions of the repos can be downloaded by running the following, which will clone the repos into `../amati-tests-specs/<repo-name>`.
 
  ```sh
  python scripts/tests/setup_test_specs.py
  ```
 
+ If there are some issues with the specification a JSON file detailing those should be placed into `tests/data/` and the name of that file noted in `tests/data/.amati.tests.yaml` for the test suite to pick it up and check that the errors are expected. Any specifications that close the coverage gap are gratefully received.
+
  To run everything, from linting, type checking to downloading test specs and building and testing the Docker image run:
 
  ```sh
  sh bin/checks.sh
  ```
 
- You will need to have Docker installed.
-
- ### Building
-
- The project uses a [`pyproject.toml` file](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#writing-pyproject-toml) to determine what to build.
-
- To install, assuming that [uv](https://docs.astral.sh/uv/) is already installed and initialised
-
- ```sh
- uv python install
- uv venv
- uv sync
- ```
-
  ### Docker
 
  A development Docker image is provided, `Dockerfile.dev`, to build:
@@ -129,7 +129,7 @@ A development Docker image is provided, `Dockerfile.dev`, to build:
  docker build -t amati -f Dockerfile .
  ```
 
- and to run against a specific specification the location of the specification needs to be mounted in the container.
+ to run against a specific specification the location of the specification needs to be mounted in the container.
 
  ```sh
  docker run -v "<path-to-mount>:/<mount-name> amati <options>
@@ -138,13 +138,13 @@ docker run -v "<path-to-mount>:/<mount-name> amati <options>
  This can be tested against a provided specification, from the root directory
 
  ```sh
- docker run --detach -v "$(pwd):/data" amati
+ docker run --detach -v "$(pwd):/data" amati <options>
  ```
 
 
  ### Data
 
- There are some scripts to create the data needed by the project, for example, all the possible licences. If the data needs to be refreshed this can be done by running the contents of `/scripts/data`.
+ There are some scripts to create the data needed by the project, for example, all the registered TLDs. To refresh the data, run the contents of `/scripts/data`.
 
 
 
amati-0.2.2.dist-info/RECORD CHANGED
@@ -1,11 +1,11 @@
  amati/__init__.py,sha256=bsPoaYcFmejsqQN6eDxcuGKXg8ZXgk1NoD6Hujwhkkw,428
  amati/_error_handler.py,sha256=s_Nhnq8hz7xQP_yeCCJ8Hwzs5xyrxlisd1XFYYECtSY,1244
  amati/_resolve_forward_references.py,sha256=iOibCv_XuIUpe7qbRBtPzd7H5rvL-aldUzlXV_9FJaw,6511
- amati/amati.py,sha256=sgWQrlvvusK8EgdcsYkCZWUNiv0TAHV8jRivUCBByQA,7698
+ amati/amati.py,sha256=y-dHwu137R18hC-vWmOMmmFvagOJ0rpIsMwFUG4QOpw,8367
  amati/exceptions.py,sha256=X_RDBVjcFn_gXPFkhKuREmm5nDCdGs4rBy6sZsXBJDo,609
  amati/file_handler.py,sha256=h95t7TDxDA_qr4rIe2bddCjh2Xw5Ht27yLK7zCQzT68,5085
- amati/logging.py,sha256=X37gLui5Y94X1mlJIzkt3fQDwPcHlZiP3QBnM5Ipric,1339
- amati/model_validators.py,sha256=zKK9G-xaERc-e9AmWBLzCIGKkmpi0nMMx94N_nd8fs8,14979
+ amati/logging.py,sha256=srg-T3z4mDFlP-RpcQhz685omnL24nXTthrTEIfKNTg,1337
+ amati/model_validators.py,sha256=ffOmhhGRGPKZsr9TPW1q_57PoLfSq7zYNTq86t3BvJc,14915
  amati/data/http-status-codes.json,sha256=xEGBlE7eCXaQFG2gNSeK1aCs7A8XxOe6JcXwAKxwWBU,10903
  amati/data/iso9110.json,sha256=YLv6V8dPrgagjAnHUWKMmade5xboV_eJB9smnUfwgDY,1692
  amati/data/media-types.json,sha256=tSRZTUqIQeiW5cmw5aDAWNqXLGcvX4gwwEkDL5H6lDg,47108
@@ -27,11 +27,11 @@ amati/grammars/oas.py,sha256=vTMnJIxeiTC3gJATOo0RRZDlRsQOWkY6jG1VRzazLjM,1666
  amati/grammars/rfc6901.py,sha256=ChpKnoPDTsaNMvJT_UCpY8xwbl9zWZcD36MjRsD8IpQ,656
  amati/grammars/rfc7159.py,sha256=eksX1m7WRzQ9iGeANG1BgmsXbAhV6gQVRkPpLqPjq6U,2218
  amati/validators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- amati/validators/generic.py,sha256=AYIHIuEHpf1p-lCynBp-YGaayIZOylzNgGi533ykVbc,3983
- amati/validators/oas304.py,sha256=84sAc37azkgrOt29scjRtYtJGypnFZd5UkV9VGLsX8c,32472
- amati/validators/oas311.py,sha256=FVMOjNXWlZhq6fgah0NGGPlgdcSZFgVxVbEwWnk4Sn4,17712
- amati-0.2.dist-info/METADATA,sha256=pDJg9ueeR6-_otHva88kNPxiEy3e-AAMIRvxsc4RFhg,5916
- amati-0.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- amati-0.2.dist-info/entry_points.txt,sha256=sacBb6g0f0ZJtNjNYx93_Xe4y5xzawvklCFVXup9ru0,37
- amati-0.2.dist-info/licenses/LICENSE,sha256=WAA01ZXeNs1bwpNWKR6aVucjtYjYm_iQIUYkCAENjqM,1070
- amati-0.2.dist-info/RECORD,,
+ amati/validators/generic.py,sha256=huFG9_Fbk5ih8y9S7fl8vRGl5Fr1158yb08967oUjw4,3977
+ amati/validators/oas304.py,sha256=KUthNRVLRCjcLvN6IQuqLAUmui0WfKc8llRFt0qJaf4,32464
+ amati/validators/oas311.py,sha256=XX8nUq0mGrbe5FR9C0_mTmIzMnWG09XbGwqpt8GHY8g,17702
+ amati-0.2.2.dist-info/METADATA,sha256=jFpRbk4OdLgFrxuedrk9VjM0J-kqmNxfI7c93SzHWQA,6425
+ amati-0.2.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ amati-0.2.2.dist-info/entry_points.txt,sha256=sacBb6g0f0ZJtNjNYx93_Xe4y5xzawvklCFVXup9ru0,37
+ amati-0.2.2.dist-info/licenses/LICENSE,sha256=WAA01ZXeNs1bwpNWKR6aVucjtYjYm_iQIUYkCAENjqM,1070
+ amati-0.2.2.dist-info/RECORD,,