arkindex-client 1.1.1__tar.gz → 1.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/PKG-INFO +1 -1
  2. arkindex-client-1.1.3/VERSION +1 -0
  3. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/client/client.py +187 -79
  4. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/client/decoders.py +5 -4
  5. arkindex-client-1.1.3/arkindex/document.py +128 -0
  6. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/pagination.py +79 -37
  7. arkindex-client-1.1.3/arkindex/schema/openapi.py +275 -0
  8. arkindex-client-1.1.3/arkindex/schema/validator.py +22 -0
  9. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex_client.egg-info/PKG-INFO +1 -1
  10. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex_client.egg-info/SOURCES.txt +0 -3
  11. arkindex-client-1.1.3/arkindex_client.egg-info/requires.txt +3 -0
  12. arkindex-client-1.1.3/requirements.txt +3 -0
  13. arkindex-client-1.1.1/VERSION +0 -1
  14. arkindex-client-1.1.1/arkindex/client/base.py +0 -98
  15. arkindex-client-1.1.1/arkindex/client/transports.py +0 -132
  16. arkindex-client-1.1.1/arkindex/document.py +0 -212
  17. arkindex-client-1.1.1/arkindex/schema/jsonschema.py +0 -66
  18. arkindex-client-1.1.1/arkindex/schema/openapi.py +0 -523
  19. arkindex-client-1.1.1/arkindex/schema/validator.py +0 -54
  20. arkindex-client-1.1.1/arkindex_client.egg-info/requires.txt +0 -4
  21. arkindex-client-1.1.1/requirements.txt +0 -4
  22. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/LICENSE +0 -0
  23. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/MANIFEST.in +0 -0
  24. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/README.md +0 -0
  25. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/apistar/__init__.py +0 -0
  26. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/apistar/exceptions.py +0 -0
  27. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/__init__.py +0 -0
  28. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/auth.py +0 -0
  29. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/client/__init__.py +0 -0
  30. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/compat.py +0 -0
  31. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/exceptions.py +0 -0
  32. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/mock.py +0 -0
  33. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex/schema/__init__.py +0 -0
  34. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex_client.egg-info/dependency_links.txt +0 -0
  35. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/arkindex_client.egg-info/top_level.txt +0 -0
  36. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/requirements-docs.txt +0 -0
  37. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/setup.cfg +0 -0
  38. {arkindex-client-1.1.1 → arkindex-client-1.1.3}/setup.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: arkindex-client
- Version: 1.1.1
+ Version: 1.1.3
  Summary: API client for the Arkindex project
  Home-page: https://gitlab.teklia.com/arkindex/api-client
  Author: Teklia <contact@teklia.com>
@@ -0,0 +1 @@
+ 1.1.3
@@ -2,14 +2,16 @@
  """
  Arkindex API Client
  """
+ import json
  import logging
  import os
  import warnings
+ from importlib.metadata import version
  from time import sleep
- from urllib.parse import urljoin, urlsplit, urlunsplit
+ from urllib.parse import quote, urljoin, urlparse, urlsplit

  import requests
- import yaml
+ import typesystem
  from tenacity import (
      before_sleep_log,
      retry,
@@ -19,20 +21,14 @@ from tenacity import (
  )

  from arkindex.auth import TokenSessionAuthentication
- from arkindex.client.base import BaseClient
- from arkindex.exceptions import ErrorResponse, SchemaError
+ from arkindex.client import decoders
+ from arkindex.exceptions import ClientError, ErrorMessage, ErrorResponse, SchemaError
  from arkindex.pagination import ResponsePaginator
+ from arkindex.schema.validator import validate

  logger = logging.getLogger(__name__)

- try:
-     from yaml import CSafeLoader as SafeLoader
-
-     logger.debug("Using LibYAML-based parser")
- except ImportError:
-     from yaml import SafeLoader
-
-     logger.debug("Using default PyYAML parser")
+ REQUEST_TIMEOUT = (30, 60)

  BASE_DIR = os.path.dirname(os.path.abspath(__file__))

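
For reference, requests treats a two-element timeout as a (connect, read) pair, so the new REQUEST_TIMEOUT constant gives each call 30 seconds to establish a connection and 60 seconds for the server to send data. A minimal illustration of the same setting on a plain request, equivalent to what the _send_request() method added later in this diff passes on every call (the URL is made up):

    import requests

    # 30 s connect timeout, 60 s read timeout
    requests.get("https://arkindex.example.com/api/v1/corpus/", timeout=(30, 60))
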
@@ -82,22 +78,7 @@ def options_from_env():
      return options


- def _find_operation(schema, operation_id):
-     for path_object in schema["paths"].values():
-         for operation in path_object.values():
-             if operation["operationId"] == operation_id:
-                 return operation
-     raise KeyError("Operation '{}' not found".format(operation_id))
-
-
- def _find_param(operation, param_name):
-     for parameter in operation.get("parameters", []):
-         if parameter["name"] == param_name:
-             return parameter
-     raise KeyError("Parameter '{}' not found".format(param_name))
-
-
- class ArkindexClient(BaseClient):
+ class ArkindexClient:
      """
      An Arkindex API client.
      """
@@ -111,7 +92,6 @@ class ArkindexClient(BaseClient):
          csrf_cookie=None,
          sleep=0,
          verify=True,
-         **kwargs,
      ):
          r"""
          :param token: An API token to use. If omitted, access is restricted to public endpoints.
@@ -125,55 +105,49 @@ class ArkindexClient(BaseClient):
          :type csrf_cookie: str or None
          :param float sleep: Number of seconds to wait before sending each API request,
             as a simple means of throttling.
-         :param \**kwargs: Keyword arguments to send to ``arkindex.client.base.BaseClient``.
+         :param bool verify: Whether to verify the SSL certificate on each request. Enabled by default.
          """
+         self.decoders = [
+             decoders.JSONDecoder(),
+             decoders.TextDecoder(),
+             decoders.DownloadDecoder(),
+         ]
+
+         self.session = requests.Session()
+         self.session.verify = verify
+         client_version = version("arkindex-client")
+         self.session.headers.update(
+             {
+                 "accept": ", ".join([decoder.media_type for decoder in self.decoders]),
+                 "user-agent": f"arkindex-client/{client_version}",
+             }
+         )
+
          if not schema_url:
              schema_url = urljoin(base_url, SCHEMA_ENDPOINT)

-         self.verify = verify
          try:
              split = urlsplit(schema_url)
              if split.scheme == "file" or not (split.scheme or split.netloc):
                  # This is a local path
                  with open(schema_url) as f:
-                     schema = yaml.load(f, Loader=SafeLoader)
+                     schema = json.load(f)
              else:
-                 resp = requests.get(schema_url, verify=self.verify)
+                 resp = self.session.get(
+                     schema_url,
+                     headers={
+                         # Explicitly request an OpenAPI schema in JSON and not YAML
+                         "Accept": "application/vnd.oai.openapi+json, application/json",
+                     },
+                 )
                  resp.raise_for_status()
-                 schema = yaml.load(resp.content, Loader=SafeLoader)
+                 schema = resp.json()
          except Exception as e:
              raise SchemaError(
                  f"Could not retrieve a proper OpenAPI schema from {schema_url}"
              ) from e

-         super().__init__(schema, **kwargs)
-
-         # An OpenAPI schema is considered valid even when there are no endpoints, making the client completely useless.
-         if not len(self.document.walk_links()):
-             raise SchemaError(
-                 f"The OpenAPI schema from {base_url} has no defined endpoints"
-             )
-
-         # Post-processing of the parsed schema
-         for link_info in self.document.walk_links():
-             # Look for deprecated links
-             # https://github.com/encode/apistar/issues/664
-             operation = _find_operation(schema, link_info.link.name)
-             link_info.link.deprecated = operation.get("deprecated", False)
-             for item in link_info.link.get_query_fields():
-                 parameter = _find_param(operation, item.name)
-                 item.deprecated = parameter.get("deprecated", False)
-
-             # Detect paginated links
-             if "x-paginated" in operation:
-                 link_info.link._paginated = operation["x-paginated"]
-
-             # Remove domains from each endpoint; allows to properly handle our base URL
-             # https://github.com/encode/apistar/issues/657
-             original_url = urlsplit(link_info.link.url)
-             # Removes the scheme and netloc
-             new_url = ("", "", *original_url[2:])
-             link_info.link.url = urlunsplit(new_url)
+         self.document = validate(schema)

          # Try to autodetect the CSRF cookie:
          # - Try to find a matching server for this base URL and look for the x-csrf-cookie extension
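
Because the schema is now parsed with json.load() and resp.json() rather than PyYAML, a locally cached schema passed through schema_url has to be a JSON dump; a YAML schema from an older setup will now fail with the SchemaError raised in the except clause above. A hedged sketch of pointing the client at such a file, assuming the usual from arkindex import ArkindexClient entry point (base URL and path are made up):

    from arkindex import ArkindexClient

    client = ArkindexClient(
        base_url="https://arkindex.example.com",
        # Local OpenAPI schema in JSON; a YAML dump would no longer load
        schema_url="/tmp/arkindex-openapi.json",
    )
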
@@ -228,7 +202,7 @@ class ArkindexClient(BaseClient):
          """
          if not csrf_cookie:
              csrf_cookie = "arkindex.csrf"
-         self.transport.session.auth = TokenSessionAuthentication(
+         self.session.auth = TokenSessionAuthentication(
              token,
              csrf_cookie_name=csrf_cookie,
              scheme=auth_scheme,
@@ -242,7 +216,15 @@ class ArkindexClient(BaseClient):
          self.document.url = base_url

          # Add the Referer header to allow Django CSRF to function
-         self.transport.headers.setdefault("Referer", self.document.url)
+         self.session.headers.setdefault("Referer", self.document.url)
+
+     def lookup_operation(self, operation_id: str):
+         if operation_id in self.document.links:
+             return self.document.links[operation_id]
+
+         text = 'Operation ID "%s" not found in schema.' % operation_id
+         message = ErrorMessage(text=text, code="invalid-operation")
+         raise ClientError(messages=[message])

      def paginate(self, operation_id, *args, **kwargs):
          """
@@ -251,9 +233,10 @@ class ArkindexClient(BaseClient):
          :return: An iterator for a paginated endpoint.
          :rtype: Union[arkindex.pagination.ResponsePaginator, dict, list]
          """
+
          link = self.lookup_operation(operation_id)
          # If there was no x-paginated, trust the caller and assume the endpoint is paginated
-         if getattr(link, "_paginated", True):
+         if link.paginated is not False:
              return ResponsePaginator(self, operation_id, *args, **kwargs)
          return self.request(operation_id, *args, **kwargs)

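
For context on the new link.paginated attribute: paginate() now returns a lazy ResponsePaginator unless the schema explicitly sets x-paginated to false, in which case it degrades to a single request() call. A minimal sketch of the caller side, reusing the ListElements example from the request() docstring later in this diff and assuming the usual from arkindex import ArkindexClient, options_from_env entry point:

    from arkindex import ArkindexClient, options_from_env

    client = ArkindexClient(**options_from_env())

    # Paginated endpoint: pages are fetched lazily while iterating
    for element in client.paginate(
        "ListElements",
        corpus="7358ab03-cc36-4160-86ce-98f70e993a0f",
        top_level=True,
    ):
        print(element["id"])
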
@@ -264,15 +247,77 @@ class ArkindexClient(BaseClient):
          """
          resp = self.request("Login", body={"email": email, "password": password})
          if "auth_token" in resp:
-             self.transport.session.auth.scheme = "Token"
-             self.transport.session.auth.token = resp["auth_token"]
+             self.session.auth.scheme = "Token"
+             self.session.auth.token = resp["auth_token"]
          return resp

-     def single_request(self, operation_id, *args, **kwargs):
+     def get_query_params(self, link, params):
+         return {
+             field.name: params[field.name]
+             for field in link.get_query_fields()
+             if field.name in params
+         }
+
+     def get_url(self, link, params):
+         url = urljoin(self.document.url, link.url)
+
+         scheme = urlparse(url).scheme.lower()
+
+         if not scheme:
+             text = "URL missing scheme '%s'." % url
+             message = ErrorMessage(text=text, code="invalid-url")
+             raise ClientError(messages=[message])
+
+         if scheme not in ("http", "https"):
+             text = "Unsupported URL scheme '%s'." % scheme
+             message = ErrorMessage(text=text, code="invalid-url")
+             raise ClientError(messages=[message])
+
+         for field in link.get_path_fields():
+             value = str(params[field.name])
+             if "{%s}" % field.name in url:
+                 url = url.replace("{%s}" % field.name, quote(value, safe=""))
+             elif "{+%s}" % field.name in url:
+                 url = url.replace("{+%s}" % field.name, quote(value, safe="/"))
+
+         return url
+
+     def get_content(self, link, params):
+         body_field = link.get_body_field()
+         if body_field and body_field.name in params:
+             assert (
+                 link.encoding == "application/json"
+             ), "Only JSON request bodies are supported"
+             return params[body_field.name]
+
+     def get_decoder(self, content_type=None):
+         """
+         Given the value of a 'Content-Type' header, return the appropriate
+         decoder for handling the response content.
+         """
+         if content_type is None:
+             return self.decoders[0]
+
+         content_type = content_type.split(";")[0].strip().lower()
+         main_type = content_type.split("/")[0] + "/*"
+         wildcard_type = "*/*"
+
+         for codec in self.decoders:
+             if codec.media_type in (content_type, main_type, wildcard_type):
+                 return codec
+
+         text = (
+             "Unsupported encoding '%s' in response Content-Type header." % content_type
+         )
+         message = ErrorMessage(text=text, code="cannot-decode-response")
+         raise ClientError(messages=[message])
+
+     def single_request(self, operation_id, **parameters):
          """
          Perform an API request.
-         :param args: Arguments passed to the BaseClient.
-         :param kwargs: Keyword arguments passed to the BaseClient.
+
+         :param str operation_id: Name of the API endpoint.
+         :param path_parameters: Path parameters for this endpoint.
          """
          link = self.lookup_operation(operation_id)
          if link.deprecated:
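
The substitution loop in the new get_url() treats plain {param} placeholders and {+param} placeholders differently: the former percent-encodes everything, the latter keeps slashes so a value may span several path segments. A standalone illustration (URLs and values are made up):

    from urllib.parse import quote

    value = "a/b c"

    # "{id}": everything is escaped, including "/"
    print("/api/v1/element/{id}/".replace("{id}", quote(value, safe="")))
    # -> /api/v1/element/a%2Fb%20c/

    # "{+path}": "/" is preserved as a path separator
    print("/iiif/{+path}/info.json".replace("{+path}", quote(value, safe="/")))
    # -> /iiif/a/b%20c/info.json
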
@@ -282,8 +327,23 @@ class ArkindexClient(BaseClient):
                  stacklevel=2,
              )

-         query_params = self.get_query_params(link, kwargs)
+         validator = typesystem.Object(
+             properties={field.name: typesystem.Any() for field in link.fields},
+             required=[field.name for field in link.fields if field.required],
+             additional_properties=False,
+         )
+         try:
+             validator.validate(parameters)
+         except typesystem.ValidationError as exc:
+             raise ClientError(messages=exc.messages()) from None
+
+         method = link.method
+         url = self.get_url(link, parameters)
+
+         content = self.get_content(link, parameters)
+         query_params = self.get_query_params(link, parameters)
          fields = link.get_query_fields()
+
          for field in fields:
              if field.deprecated and field.name in query_params:
                  warnings.warn(
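
The typesystem.Object validator built in single_request() rejects unknown or missing keyword arguments before any HTTP request is sent, surfacing the failure as a ClientError. A standalone sketch of that check, with made-up field names:

    import typesystem

    validator = typesystem.Object(
        properties={"id": typesystem.Any(), "with_children": typesystem.Any()},
        required=["id"],
        additional_properties=False,
    )

    validator.validate({"id": "1234", "with_children": True})  # passes

    try:
        # Misspelled parameter and missing required "id"
        validator.validate({"with_childs": True})
    except typesystem.ValidationError as exc:
        print(exc.messages())
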
@@ -291,12 +351,41 @@ class ArkindexClient(BaseClient):
                      DeprecationWarning,
                      stacklevel=2,
                  )
+
          if self.sleep_duration:
              logger.debug(
                  "Delaying request by {:f} seconds...".format(self.sleep_duration)
              )
              sleep(self.sleep_duration)
-         return super().request(operation_id, *args, **kwargs)
+
+         return self._send_request(
+             method, url, query_params=query_params, content=content
+         )
+
+     def _send_request(self, method, url, query_params=None, content=None):
+         options = {
+             "params": query_params,
+             "timeout": REQUEST_TIMEOUT,
+         }
+         if content is not None:
+             options["json"] = content
+
+         response = self.session.request(method, url, **options)
+
+         # Given an HTTP response, return the decoded data.
+         result = None
+         if response.content:
+             content_type = response.headers.get("content-type")
+             decoder = self.get_decoder(content_type)
+             result = decoder.decode(response)
+
+         if 400 <= response.status_code <= 599:
+             title = "%d %s" % (response.status_code, response.reason)
+             raise ErrorResponse(
+                 title=title, status_code=response.status_code, content=result
+             )
+
+         return result

      @retry(
          retry=retry_if_exception(_is_500_error),
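
The new _send_request() decodes the response body first and only then raises ErrorResponse for any 4xx or 5xx status, so the decoded error payload travels with the exception. A hedged sketch of handling it, assuming ErrorResponse keeps its constructor arguments as attributes (the operation name and ID are made up):

    from arkindex import ArkindexClient, options_from_env
    from arkindex.exceptions import ErrorResponse

    client = ArkindexClient(**options_from_env())

    try:
        client.request("RetrieveElement", id="00000000-0000-0000-0000-000000000000")
    except ErrorResponse as e:
        # e.g. "404 Not Found" plus the JSON error body returned by the server
        print(e.status_code, e.content)
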
@@ -305,15 +394,34 @@
          stop=stop_after_attempt(5),
          before_sleep=before_sleep_log(logger, logging.INFO),
      )
-     def request(self, operation_id, *args, **kwargs):
+     def request(self, operation_id, **parameters):
          """
-         Proxy all Arkindex API requests with a retry mechanism in case of 50X errors.
-         The same API call will be retried 5 times, with an exponential sleep time
-         going through 3, 4, 8 and 16 seconds of wait between call.
+         Perform an API request with an automatic retry mechanism in case of 50X errors.
+         A failing API call will be retried 5 times, with an exponential sleep time going
+         through 3, 4, 8 and 16 seconds of wait between call.
          If the 5th call still gives a 50x, the exception is re-raised and the caller should catch it.
          Log messages are displayed before sleeping (when at least one exception occurred).

-         :param args: Arguments passed to the BaseClient.
-         :param kwargs: Keyword arguments passed to the BaseClient.
+         :param str operation_id: Name of the API endpoint.
+         :param parameters: Body, Path or Query parameters passed as kwargs.
+            Body parameters must be passed using the `body` keyword argument, others can be set directly.
+
+         Example usage for POST and unpaginated GET requests:
+
+         >>> request(
+         ...     "CreateMetaDataBulk",
+         ...     id="8f8f196f-49bc-444e-9cfe-c705c3cd01ae",
+         ...     body={
+         ...         "worker_run_id": "50e1f2d4-2087-41ed-a862-d17576bae480",
+         ...         "metadata_list": [
+         ...             …
+         ...         ],
+         ...     },
+         ... )
+         >>> request(
+         ...     "ListElements",
+         ...     corpus="7358ab03-cc36-4160-86ce-98f70e993a0f",
+         ...     top_level=True,
+         ... )
          """
-         return self.single_request(operation_id, *args, **kwargs)
+         return self.single_request(operation_id, **parameters)
@@ -1,10 +1,10 @@
  # -*- coding: utf-8 -*-
- import cgi
  import json
  import os
  import posixpath
  import shutil
  import tempfile
+ from email.message import EmailMessage
  from urllib.parse import urlparse

  from arkindex.compat import DownloadedFile
@@ -213,10 +213,11 @@ def _get_filename_from_content_disposition(content_disposition):
      """
      Determine an output filename based on the `Content-Disposition` header.
      """
-     value, params = cgi.parse_header(content_disposition)
+     message = EmailMessage()
+     message["content-disposition"] = content_disposition
+     filename = message["content-disposition"].params.get("filename")

-     if "filename" in params:
-         filename = params["filename"]
+     if filename:
          return _safe_filename(filename)

      return None
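
The EmailMessage-based parsing replaces cgi.parse_header(), since the cgi module was deprecated in Python 3.11 and removed in 3.13. A quick standalone check of the new code path (the header value is made up):

    from email.message import EmailMessage

    message = EmailMessage()
    message["content-disposition"] = 'attachment; filename="export.zip"'
    print(message["content-disposition"].params.get("filename"))  # export.zip
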
@@ -0,0 +1,128 @@
+ # -*- coding: utf-8 -*-
+ import re
+ import typing
+
+ from arkindex.exceptions import SchemaError
+
+
+ class Document:
+     def __init__(
+         self,
+         links: typing.Sequence["Link"],
+         url: str = "",
+     ):
+         if not len(links):
+             raise SchemaError(
+                 "An OpenAPI document must contain at least one valid operation."
+             )
+
+         links_by_name = {}
+
+         # Ensure all names within a document are unique.
+         for link in links:
+             assert (
+                 link.name not in links_by_name
+             ), f'Link "{link.name}" in Document must have a unique name.'
+             links_by_name[link.name] = link
+
+         self.links = links_by_name
+         self.url = url
+
+
+ class Link:
+     """
+     Links represent the actions that a client may perform.
+     """
+
+     def __init__(
+         self,
+         url: str,
+         method: str,
+         handler: typing.Callable = None,
+         name: str = "",
+         encoding: str = "",
+         fields: typing.Sequence["Field"] = None,
+         deprecated: bool = False,
+         paginated: typing.Optional[bool] = None,
+     ):
+         method = method.upper()
+         fields = [] if (fields is None) else list(fields)
+
+         url_path_names = set(
+             [item.strip("{}").lstrip("+") for item in re.findall("{[^}]*}", url)]
+         )
+         path_fields = [field for field in fields if field.location == "path"]
+         body_fields = [field for field in fields if field.location == "body"]
+
+         assert method in (
+             "GET",
+             "POST",
+             "PUT",
+             "PATCH",
+             "DELETE",
+             "OPTIONS",
+             "HEAD",
+             "TRACE",
+         )
+         assert len(body_fields) < 2
+         if body_fields:
+             assert encoding
+         for field in path_fields:
+             assert field.name in url_path_names
+
+         # Add in path fields for any "{param}" items that don't already have
+         # a corresponding path field.
+         for path_name in url_path_names:
+             if path_name not in [field.name for field in path_fields]:
+                 fields += [Field(name=path_name, location="path", required=True)]
+
+         self.url = url
+         self.method = method
+         self.handler = handler
+         self.name = name if name else handler.__name__
+         self.encoding = encoding
+         self.fields = fields
+         self.deprecated = deprecated
+         self.paginated = paginated
+
+     def get_path_fields(self):
+         return [field for field in self.fields if field.location == "path"]
+
+     def get_query_fields(self):
+         return [field for field in self.fields if field.location == "query"]
+
+     def get_body_field(self):
+         for field in self.fields:
+             if field.location == "body":
+                 return field
+         return None
+
+     def get_expanded_body(self):
+         field = self.get_body_field()
+         if field is None or not hasattr(field.schema, "properties"):
+             return None
+         return field.schema.properties
+
+
+ class Field:
+     def __init__(
+         self,
+         name: str,
+         location: str,
+         required: bool = None,
+         schema: typing.Any = None,
+         example: typing.Any = None,
+         deprecated: bool = False,
+     ):
+         assert location in ("path", "query", "body", "cookie", "header", "formData")
+         if required is None:
+             required = True if location in ("path", "body") else False
+         if location == "path":
+             assert required, "May not set 'required=False' on path fields."
+
+         self.name = name
+         self.location = location
+         self.required = required
+         self.schema = schema
+         self.example = example
+         self.deprecated = deprecated
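
The rewritten arkindex/document.py above is the slimmed-down document model the client now uses instead of the apistar-based one removed in this release. To make its contract concrete, a small hedged example of building a Link and a Document by hand (operation and field names are made up); note how a path Field is synthesized automatically for the {id} placeholder:

    from arkindex.document import Document, Field, Link

    link = Link(
        url="/api/v1/element/{id}/",
        method="get",  # normalized to "GET"
        name="RetrieveElement",
        fields=[Field(name="with_children", location="query", required=False)],
    )

    # A required path Field named "id" was added from the "{id}" placeholder
    print([(f.name, f.location) for f in link.fields])
    # -> [('with_children', 'query'), ('id', 'path')]

    document = Document(links=[link], url="https://arkindex.example.com")
    print(document.links["RetrieveElement"].method)  # GET
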