train_travel-0.0.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. train_travel/__init__.py +17 -0
  2. train_travel/_hooks/__init__.py +6 -0
  3. train_travel/_hooks/oauth2scopes.py +16 -0
  4. train_travel/_hooks/registration.py +13 -0
  5. train_travel/_hooks/sdkhooks.py +76 -0
  6. train_travel/_hooks/types.py +112 -0
  7. train_travel/_version.py +15 -0
  8. train_travel/basesdk.py +388 -0
  9. train_travel/bookings.py +1190 -0
  10. train_travel/errors/__init__.py +63 -0
  11. train_travel/errors/no_response_error.py +17 -0
  12. train_travel/errors/responsevalidationerror.py +27 -0
  13. train_travel/errors/traintraveldefaulterror.py +40 -0
  14. train_travel/errors/traintravelerror.py +30 -0
  15. train_travel/httpclient.py +125 -0
  16. train_travel/models/__init__.py +416 -0
  17. train_travel/models/booking.py +59 -0
  18. train_travel/models/booking_input.py +52 -0
  19. train_travel/models/bookingpayment.py +221 -0
  20. train_travel/models/create_booking_paymentop.py +269 -0
  21. train_travel/models/create_booking_rawop.py +144 -0
  22. train_travel/models/create_bookingop.py +144 -0
  23. train_travel/models/delete_bookingop.py +30 -0
  24. train_travel/models/get_bookingop.py +159 -0
  25. train_travel/models/get_bookingsop.py +198 -0
  26. train_travel/models/get_stationsop.py +230 -0
  27. train_travel/models/get_tripsop.py +324 -0
  28. train_travel/models/links_booking.py +35 -0
  29. train_travel/models/links_self.py +36 -0
  30. train_travel/models/new_bookingop.py +92 -0
  31. train_travel/models/security.py +39 -0
  32. train_travel/models/station.py +57 -0
  33. train_travel/models/trip.py +90 -0
  34. train_travel/payments.py +262 -0
  35. train_travel/py.typed +1 -0
  36. train_travel/sdk.py +213 -0
  37. train_travel/sdkconfiguration.py +51 -0
  38. train_travel/stations.py +284 -0
  39. train_travel/trips.py +291 -0
  40. train_travel/types/__init__.py +21 -0
  41. train_travel/types/basemodel.py +77 -0
  42. train_travel/utils/__init__.py +206 -0
  43. train_travel/utils/annotations.py +79 -0
  44. train_travel/utils/datetimes.py +23 -0
  45. train_travel/utils/enums.py +134 -0
  46. train_travel/utils/eventstreaming.py +248 -0
  47. train_travel/utils/forms.py +234 -0
  48. train_travel/utils/headers.py +136 -0
  49. train_travel/utils/logger.py +27 -0
  50. train_travel/utils/metadata.py +118 -0
  51. train_travel/utils/queryparams.py +217 -0
  52. train_travel/utils/requestbodies.py +66 -0
  53. train_travel/utils/retries.py +281 -0
  54. train_travel/utils/security.py +192 -0
  55. train_travel/utils/serializers.py +229 -0
  56. train_travel/utils/unmarshal_json_response.py +38 -0
  57. train_travel/utils/url.py +155 -0
  58. train_travel/utils/values.py +137 -0
  59. train_travel-0.0.7.dist-info/METADATA +567 -0
  60. train_travel-0.0.7.dist-info/RECORD +62 -0
  61. train_travel-0.0.7.dist-info/WHEEL +5 -0
  62. train_travel-0.0.7.dist-info/top_level.txt +1 -0
train_travel/utils/datetimes.py
@@ -0,0 +1,23 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from datetime import datetime
+ import sys
+
+
+ def parse_datetime(datetime_string: str) -> datetime:
+     """
+     Convert a RFC 3339 / ISO 8601 formatted string into a datetime object.
+     Python versions 3.11 and later support parsing RFC 3339 directly with
+     datetime.fromisoformat(), but for earlier versions, this function
+     encapsulates the necessary extra logic.
+     """
+     # Python 3.11 and later can parse RFC 3339 directly
+     if sys.version_info >= (3, 11):
+         return datetime.fromisoformat(datetime_string)
+
+     # For Python 3.10 and earlier, a common ValueError is trailing 'Z' suffix,
+     # so fix that upfront.
+     if datetime_string.endswith("Z"):
+         datetime_string = datetime_string[:-1] + "+00:00"
+
+     return datetime.fromisoformat(datetime_string)
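Note: for readers unfamiliar with the pre-3.11 limitation the docstring mentions, a minimal usage sketch follows. The import path mirrors the file location listed above, and the timestamp is illustrative.

    from train_travel.utils.datetimes import parse_datetime

    # On Python <= 3.10, datetime.fromisoformat() rejects a trailing 'Z',
    # so parse_datetime() rewrites it to '+00:00' before parsing.
    dt = parse_datetime("2024-02-01T10:00:00Z")
    print(dt.isoformat())  # 2024-02-01T10:00:00+00:00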
train_travel/utils/enums.py
@@ -0,0 +1,134 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ import enum
+ import sys
+ from typing import Any
+
+ from pydantic_core import core_schema
+
+
+ class OpenEnumMeta(enum.EnumMeta):
+     # The __call__ method `boundary` kwarg was added in 3.11 and must be present
+     # for pyright. Refer also: https://github.com/pylint-dev/pylint/issues/9622
+     # pylint: disable=unexpected-keyword-arg
+     # The __call__ method `values` varg must be named for pyright.
+     # pylint: disable=keyword-arg-before-vararg
+
+     if sys.version_info >= (3, 11):
+         def __call__(
+             cls, value, names=None, *values, module=None, qualname=None, type=None, start=1, boundary=None
+         ):
+             # The `type` kwarg also happens to be a built-in that pylint flags as
+             # redeclared. Safe to ignore this lint rule with this scope.
+             # pylint: disable=redefined-builtin
+
+             if names is not None:
+                 return super().__call__(
+                     value,
+                     names=names,
+                     *values,
+                     module=module,
+                     qualname=qualname,
+                     type=type,
+                     start=start,
+                     boundary=boundary,
+                 )
+
+             try:
+                 return super().__call__(
+                     value,
+                     names=names,  # pyright: ignore[reportArgumentType]
+                     *values,
+                     module=module,
+                     qualname=qualname,
+                     type=type,
+                     start=start,
+                     boundary=boundary,
+                 )
+             except ValueError:
+                 return value
+     else:
+         def __call__(
+             cls, value, names=None, *, module=None, qualname=None, type=None, start=1
+         ):
+             # The `type` kwarg also happens to be a built-in that pylint flags as
+             # redeclared. Safe to ignore this lint rule with this scope.
+             # pylint: disable=redefined-builtin
+
+             if names is not None:
+                 return super().__call__(
+                     value,
+                     names=names,
+                     module=module,
+                     qualname=qualname,
+                     type=type,
+                     start=start,
+                 )
+
+             try:
+                 return super().__call__(
+                     value,
+                     names=names,  # pyright: ignore[reportArgumentType]
+                     module=module,
+                     qualname=qualname,
+                     type=type,
+                     start=start,
+                 )
+             except ValueError:
+                 return value
+
+     def __new__(mcs, name, bases, namespace, **kwargs):
+         cls = super().__new__(mcs, name, bases, namespace, **kwargs)
+
+         # Add __get_pydantic_core_schema__ to make open enums work correctly
+         # in union discrimination. In strict mode (used by Pydantic for unions),
+         # only known enum values match. In lax mode, unknown values are accepted.
+         def __get_pydantic_core_schema__(
+             cls_inner: Any, _source_type: Any, _handler: Any
+         ) -> core_schema.CoreSchema:
+             # Create a validator that only accepts known enum values (for strict mode)
+             def validate_strict(v: Any) -> Any:
+                 if isinstance(v, cls_inner):
+                     return v
+                 # Use the parent EnumMeta's __call__ which raises ValueError for unknown values
+                 return enum.EnumMeta.__call__(cls_inner, v)
+
+             # Create a lax validator that accepts unknown values
+             def validate_lax(v: Any) -> Any:
+                 if isinstance(v, cls_inner):
+                     return v
+                 try:
+                     return enum.EnumMeta.__call__(cls_inner, v)
+                 except ValueError:
+                     # Return the raw value for unknown enum values
+                     return v
+
+             # Determine the base type schema (str or int)
+             is_int_enum = False
+             for base in cls_inner.__mro__:
+                 if base is int:
+                     is_int_enum = True
+                     break
+                 if base is str:
+                     break
+
+             base_schema = (
+                 core_schema.int_schema()
+                 if is_int_enum
+                 else core_schema.str_schema()
+             )
+
+             # Use lax_or_strict_schema:
+             # - strict mode: only known enum values match (raises ValueError for unknown)
+             # - lax mode: accept any value, return enum member or raw value
+             return core_schema.lax_or_strict_schema(
+                 lax_schema=core_schema.chain_schema(
+                     [base_schema, core_schema.no_info_plain_validator_function(validate_lax)]
+                 ),
+                 strict_schema=core_schema.chain_schema(
+                     [base_schema, core_schema.no_info_plain_validator_function(validate_strict)]
+                 ),
+             )
+
+         setattr(cls, "__get_pydantic_core_schema__", classmethod(__get_pydantic_core_schema__))
+         return cls
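Note: the net effect, per the comments above, is an "open" enum: known values resolve to members, while unknown values are passed through instead of raising. A rough sketch of that behaviour, assuming an enum is declared with this metaclass (the Status class and its members are hypothetical, not part of the SDK):

    import enum

    from train_travel.utils.enums import OpenEnumMeta  # path per the file list above

    class Status(str, enum.Enum, metaclass=OpenEnumMeta):
        PENDING = "pending"
        CONFIRMED = "confirmed"

    print(Status("pending"))  # known value resolves to the Status.PENDING member
    print(Status("on-hold"))  # unknown value comes back as the raw string, no ValueError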
train_travel/utils/eventstreaming.py
@@ -0,0 +1,248 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ import re
+ import json
+ from typing import (
+     Callable,
+     Generic,
+     TypeVar,
+     Optional,
+     Generator,
+     AsyncGenerator,
+     Tuple,
+ )
+ import httpx
+
+ T = TypeVar("T")
+
+
+ class EventStream(Generic[T]):
+     # Holds a reference to the SDK client to avoid it being garbage collected
+     # and cause termination of the underlying httpx client.
+     client_ref: Optional[object]
+     response: httpx.Response
+     generator: Generator[T, None, None]
+
+     def __init__(
+         self,
+         response: httpx.Response,
+         decoder: Callable[[str], T],
+         sentinel: Optional[str] = None,
+         client_ref: Optional[object] = None,
+     ):
+         self.response = response
+         self.generator = stream_events(response, decoder, sentinel)
+         self.client_ref = client_ref
+
+     def __iter__(self):
+         return self
+
+     def __next__(self):
+         return next(self.generator)
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         self.response.close()
+
+
+ class EventStreamAsync(Generic[T]):
+     # Holds a reference to the SDK client to avoid it being garbage collected
+     # and cause termination of the underlying httpx client.
+     client_ref: Optional[object]
+     response: httpx.Response
+     generator: AsyncGenerator[T, None]
+
+     def __init__(
+         self,
+         response: httpx.Response,
+         decoder: Callable[[str], T],
+         sentinel: Optional[str] = None,
+         client_ref: Optional[object] = None,
+     ):
+         self.response = response
+         self.generator = stream_events_async(response, decoder, sentinel)
+         self.client_ref = client_ref
+
+     def __aiter__(self):
+         return self
+
+     async def __anext__(self):
+         return await self.generator.__anext__()
+
+     async def __aenter__(self):
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         await self.response.aclose()
+
+
+ class ServerEvent:
+     id: Optional[str] = None
+     event: Optional[str] = None
+     data: Optional[str] = None
+     retry: Optional[int] = None
+
+
+ MESSAGE_BOUNDARIES = [
+     b"\r\n\r\n",
+     b"\n\n",
+     b"\r\r",
+ ]
+
+
+ async def stream_events_async(
+     response: httpx.Response,
+     decoder: Callable[[str], T],
+     sentinel: Optional[str] = None,
+ ) -> AsyncGenerator[T, None]:
+     buffer = bytearray()
+     position = 0
+     discard = False
+     async for chunk in response.aiter_bytes():
+         # We've encountered the sentinel value and should no longer process
+         # incoming data. Instead we throw new data away until the server closes
+         # the connection.
+         if discard:
+             continue
+
+         buffer += chunk
+         for i in range(position, len(buffer)):
+             char = buffer[i : i + 1]
+             seq: Optional[bytes] = None
+             if char in [b"\r", b"\n"]:
+                 for boundary in MESSAGE_BOUNDARIES:
+                     seq = _peek_sequence(i, buffer, boundary)
+                     if seq is not None:
+                         break
+             if seq is None:
+                 continue
+
+             block = buffer[position:i]
+             position = i + len(seq)
+             event, discard = _parse_event(block, decoder, sentinel)
+             if event is not None:
+                 yield event
+
+         if position > 0:
+             buffer = buffer[position:]
+             position = 0
+
+     event, discard = _parse_event(buffer, decoder, sentinel)
+     if event is not None:
+         yield event
+
+
+ def stream_events(
+     response: httpx.Response,
+     decoder: Callable[[str], T],
+     sentinel: Optional[str] = None,
+ ) -> Generator[T, None, None]:
+     buffer = bytearray()
+     position = 0
+     discard = False
+     for chunk in response.iter_bytes():
+         # We've encountered the sentinel value and should no longer process
+         # incoming data. Instead we throw new data away until the server closes
+         # the connection.
+         if discard:
+             continue
+
+         buffer += chunk
+         for i in range(position, len(buffer)):
+             char = buffer[i : i + 1]
+             seq: Optional[bytes] = None
+             if char in [b"\r", b"\n"]:
+                 for boundary in MESSAGE_BOUNDARIES:
+                     seq = _peek_sequence(i, buffer, boundary)
+                     if seq is not None:
+                         break
+             if seq is None:
+                 continue
+
+             block = buffer[position:i]
+             position = i + len(seq)
+             event, discard = _parse_event(block, decoder, sentinel)
+             if event is not None:
+                 yield event
+
+         if position > 0:
+             buffer = buffer[position:]
+             position = 0
+
+     event, discard = _parse_event(buffer, decoder, sentinel)
+     if event is not None:
+         yield event
+
+
+ def _parse_event(
+     raw: bytearray, decoder: Callable[[str], T], sentinel: Optional[str] = None
+ ) -> Tuple[Optional[T], bool]:
+     block = raw.decode()
+     lines = re.split(r"\r?\n|\r", block)
+     publish = False
+     event = ServerEvent()
+     data = ""
+     for line in lines:
+         if not line:
+             continue
+
+         delim = line.find(":")
+         if delim <= 0:
+             continue
+
+         field = line[0:delim]
+         value = line[delim + 1 :] if delim < len(line) - 1 else ""
+         if len(value) and value[0] == " ":
+             value = value[1:]
+
+         if field == "event":
+             event.event = value
+             publish = True
+         elif field == "data":
+             data += value + "\n"
+             publish = True
+         elif field == "id":
+             event.id = value
+             publish = True
+         elif field == "retry":
+             event.retry = int(value) if value.isdigit() else None
+             publish = True
+
+     if sentinel and data == f"{sentinel}\n":
+         return None, True
+
+     if data:
+         data = data[:-1]
+         event.data = data
+
+         data_is_primitive = (
+             data.isnumeric() or data == "true" or data == "false" or data == "null"
+         )
+         data_is_json = (
+             data.startswith("{") or data.startswith("[") or data.startswith('"')
+         )
+
+         if data_is_primitive or data_is_json:
+             try:
+                 event.data = json.loads(data)
+             except Exception:
+                 pass
+
+     out = None
+     if publish:
+         out = decoder(json.dumps(event.__dict__))
+
+     return out, False
+
+
+ def _peek_sequence(position: int, buffer: bytearray, sequence: bytes):
+     if len(sequence) > (len(buffer) - position):
+         return None
+
+     for i, seq in enumerate(sequence):
+         if buffer[position + i] != seq:
+             return None
+
+     return sequence
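Note: taken together, stream_events()/stream_events_async() split the byte stream on SSE message boundaries, _parse_event() folds the id/event/data/retry fields into a ServerEvent, and the caller-supplied decoder receives that event serialized as a JSON string. A rough usage sketch (the endpoint, decoder, and "[DONE]" sentinel below are illustrative assumptions, not values taken from this SDK):

    import json

    import httpx

    from train_travel.utils.eventstreaming import EventStream

    def decode(raw: str) -> dict:
        # The decoder is handed the ServerEvent as JSON, e.g.
        # {"event": "...", "data": ...}; map it to any model type here.
        return json.loads(raw)

    client = httpx.Client()
    with client.stream("GET", "https://api.example.com/events") as response:
        with EventStream(response, decode, sentinel="[DONE]") as stream:
            for event in stream:
                print(event.get("event"), event.get("data"))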
train_travel/utils/forms.py
@@ -0,0 +1,234 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from typing import (
+     Any,
+     Dict,
+     get_type_hints,
+     List,
+     Tuple,
+ )
+ from pydantic import BaseModel
+ from pydantic.fields import FieldInfo
+
+ from .serializers import marshal_json
+
+ from .metadata import (
+     FormMetadata,
+     MultipartFormMetadata,
+     find_field_metadata,
+ )
+ from .values import _is_set, _val_to_string
+
+
+ def _populate_form(
+     field_name: str,
+     explode: bool,
+     obj: Any,
+     delimiter: str,
+     form: Dict[str, List[str]],
+ ):
+     if not _is_set(obj):
+         return form
+
+     if isinstance(obj, BaseModel):
+         items = []
+
+         obj_fields: Dict[str, FieldInfo] = obj.__class__.model_fields
+         for name in obj_fields:
+             obj_field = obj_fields[name]
+             obj_field_name = obj_field.alias if obj_field.alias is not None else name
+             if obj_field_name == "":
+                 continue
+
+             val = getattr(obj, name)
+             if not _is_set(val):
+                 continue
+
+             if explode:
+                 form[obj_field_name] = [_val_to_string(val)]
+             else:
+                 items.append(f"{obj_field_name}{delimiter}{_val_to_string(val)}")
+
+         if len(items) > 0:
+             form[field_name] = [delimiter.join(items)]
+     elif isinstance(obj, Dict):
+         items = []
+         for key, value in obj.items():
+             if not _is_set(value):
+                 continue
+
+             if explode:
+                 form[key] = [_val_to_string(value)]
+             else:
+                 items.append(f"{key}{delimiter}{_val_to_string(value)}")
+
+         if len(items) > 0:
+             form[field_name] = [delimiter.join(items)]
+     elif isinstance(obj, List):
+         items = []
+
+         for value in obj:
+             if not _is_set(value):
+                 continue
+
+             if explode:
+                 if not field_name in form:
+                     form[field_name] = []
+                 form[field_name].append(_val_to_string(value))
+             else:
+                 items.append(_val_to_string(value))
+
+         if len(items) > 0:
+             form[field_name] = [delimiter.join([str(item) for item in items])]
+     else:
+         form[field_name] = [_val_to_string(obj)]
+
+     return form
+
+
+ def _extract_file_properties(file_obj: Any) -> Tuple[str, Any, Any]:
+     """Extract file name, content, and content type from a file object."""
+     file_fields: Dict[str, FieldInfo] = file_obj.__class__.model_fields
+
+     file_name = ""
+     content = None
+     content_type = None
+
+     for file_field_name in file_fields:
+         file_field = file_fields[file_field_name]
+
+         file_metadata = find_field_metadata(file_field, MultipartFormMetadata)
+         if file_metadata is None:
+             continue
+
+         if file_metadata.content:
+             content = getattr(file_obj, file_field_name, None)
+         elif file_field_name == "content_type":
+             content_type = getattr(file_obj, file_field_name, None)
+         else:
+             file_name = getattr(file_obj, file_field_name)
+
+     if file_name == "" or content is None:
+         raise ValueError("invalid multipart/form-data file")
+
+     return file_name, content, content_type
+
+
+ def serialize_multipart_form(
+     media_type: str, request: Any
+ ) -> Tuple[str, Dict[str, Any], List[Tuple[str, Any]]]:
+     form: Dict[str, Any] = {}
+     files: List[Tuple[str, Any]] = []
+
+     if not isinstance(request, BaseModel):
+         raise TypeError("invalid request body type")
+
+     request_fields: Dict[str, FieldInfo] = request.__class__.model_fields
+     request_field_types = get_type_hints(request.__class__)
+
+     for name in request_fields:
+         field = request_fields[name]
+
+         val = getattr(request, name)
+         if not _is_set(val):
+             continue
+
+         field_metadata = find_field_metadata(field, MultipartFormMetadata)
+         if not field_metadata:
+             continue
+
+         f_name = field.alias if field.alias else name
+
+         if field_metadata.file:
+             if isinstance(val, List):
+                 # Handle array of files
+                 array_field_name = f_name
+                 for file_obj in val:
+                     if not _is_set(file_obj):
+                         continue
+
+                     file_name, content, content_type = _extract_file_properties(
+                         file_obj
+                     )
+
+                     if content_type is not None:
+                         files.append(
+                             (array_field_name, (file_name, content, content_type))
+                         )
+                     else:
+                         files.append((array_field_name, (file_name, content)))
+             else:
+                 # Handle single file
+                 file_name, content, content_type = _extract_file_properties(val)
+
+                 if content_type is not None:
+                     files.append((f_name, (file_name, content, content_type)))
+                 else:
+                     files.append((f_name, (file_name, content)))
+         elif field_metadata.json:
+             files.append(
+                 (
+                     f_name,
+                     (
+                         None,
+                         marshal_json(val, request_field_types[name]),
+                         "application/json",
+                     ),
+                 )
+             )
+         else:
+             if isinstance(val, List):
+                 values = []
+
+                 for value in val:
+                     if not _is_set(value):
+                         continue
+                     values.append(_val_to_string(value))
+
+                 array_field_name = f_name
+                 form[array_field_name] = values
+             else:
+                 form[f_name] = _val_to_string(val)
+     return media_type, form, files
+
+
+ def serialize_form_data(data: Any) -> Dict[str, Any]:
+     form: Dict[str, List[str]] = {}
+
+     if isinstance(data, BaseModel):
+         data_fields: Dict[str, FieldInfo] = data.__class__.model_fields
+         data_field_types = get_type_hints(data.__class__)
+         for name in data_fields:
+             field = data_fields[name]
+
+             val = getattr(data, name)
+             if not _is_set(val):
+                 continue
+
+             metadata = find_field_metadata(field, FormMetadata)
+             if metadata is None:
+                 continue
+
+             f_name = field.alias if field.alias is not None else name
+
+             if metadata.json:
+                 form[f_name] = [marshal_json(val, data_field_types[name])]
+             else:
+                 if metadata.style == "form":
+                     _populate_form(
+                         f_name,
+                         metadata.explode,
+                         val,
+                         ",",
+                         form,
+                     )
+                 else:
+                     raise ValueError(f"Invalid form style for field {name}")
+     elif isinstance(data, Dict):
+         for key, value in data.items():
+             if _is_set(value):
+                 form[key] = [_val_to_string(value)]
+     else:
+         raise TypeError(f"Invalid request body type {type(data)} for form data")
+
+     return form
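Note: serialize_form_data() returns a mapping from field name to a list of string values: pydantic models are walked via their FormMetadata ("json" fields are marshalled, "form"-style fields go through _populate_form with a "," delimiter and the field's explode setting), plain dicts are stringified directly, and anything else is rejected. A small sketch of the dict branch, the simplest path, assuming the private _is_set/_val_to_string helpers from .values pass plain strings through unchanged:

    from train_travel.utils.forms import serialize_form_data

    # Plain dicts take the elif branch above: every set value becomes a
    # single-element list of its string form.
    form = serialize_form_data({"origin": "London Paddington", "destination": "Edinburgh Waverley"})
    print(form)  # expected: {'origin': ['London Paddington'], 'destination': ['Edinburgh Waverley']}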