zillow-rapidapi-client 0.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zillow_rapidapi_client/__init__.py +18 -0
- zillow_rapidapi_client/_hooks/__init__.py +5 -0
- zillow_rapidapi_client/_hooks/registration.py +13 -0
- zillow_rapidapi_client/_hooks/sdkhooks.py +76 -0
- zillow_rapidapi_client/_hooks/types.py +106 -0
- zillow_rapidapi_client/_version.py +15 -0
- zillow_rapidapi_client/basesdk.py +358 -0
- zillow_rapidapi_client/httpclient.py +134 -0
- zillow_rapidapi_client/models/__init__.py +40 -0
- zillow_rapidapi_client/models/apierror.py +22 -0
- zillow_rapidapi_client/models/property.py +163 -0
- zillow_rapidapi_client/models/propertyextendedsearchop.py +106 -0
- zillow_rapidapi_client/models/propertysearchresponse.py +32 -0
- zillow_rapidapi_client/properties.py +221 -0
- zillow_rapidapi_client/py.typed +1 -0
- zillow_rapidapi_client/sdk.py +114 -0
- zillow_rapidapi_client/sdkconfiguration.py +52 -0
- zillow_rapidapi_client/types/__init__.py +21 -0
- zillow_rapidapi_client/types/basemodel.py +39 -0
- zillow_rapidapi_client/utils/__init__.py +99 -0
- zillow_rapidapi_client/utils/annotations.py +55 -0
- zillow_rapidapi_client/utils/enums.py +34 -0
- zillow_rapidapi_client/utils/eventstreaming.py +238 -0
- zillow_rapidapi_client/utils/forms.py +202 -0
- zillow_rapidapi_client/utils/headers.py +136 -0
- zillow_rapidapi_client/utils/logger.py +27 -0
- zillow_rapidapi_client/utils/metadata.py +118 -0
- zillow_rapidapi_client/utils/queryparams.py +205 -0
- zillow_rapidapi_client/utils/requestbodies.py +66 -0
- zillow_rapidapi_client/utils/retries.py +217 -0
- zillow_rapidapi_client/utils/security.py +174 -0
- zillow_rapidapi_client/utils/serializers.py +215 -0
- zillow_rapidapi_client/utils/url.py +155 -0
- zillow_rapidapi_client/utils/values.py +137 -0
- zillow_rapidapi_client-0.1.3.dist-info/METADATA +419 -0
- zillow_rapidapi_client-0.1.3.dist-info/RECORD +37 -0
- zillow_rapidapi_client-0.1.3.dist-info/WHEEL +4 -0
@@ -0,0 +1,238 @@
|
|
1
|
+
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
|
2
|
+
|
3
|
+
import re
|
4
|
+
import json
|
5
|
+
from typing import (
|
6
|
+
Callable,
|
7
|
+
Generic,
|
8
|
+
TypeVar,
|
9
|
+
Optional,
|
10
|
+
Generator,
|
11
|
+
AsyncGenerator,
|
12
|
+
Tuple,
|
13
|
+
)
|
14
|
+
import httpx
|
15
|
+
|
16
|
+
T = TypeVar("T")
|
17
|
+
|
18
|
+
|
19
|
+
class EventStream(Generic[T]):
    """Synchronous iterator over decoded server-sent events.

    Wraps an ``httpx.Response`` and lazily yields events produced by
    ``stream_events``.  Use as a context manager so the underlying HTTP
    response is always closed.
    """

    response: httpx.Response
    generator: Generator[T, None, None]

    def __init__(
        self,
        response: httpx.Response,
        decoder: Callable[[str], T],
        sentinel: Optional[str] = None,
    ):
        """Bind *response* and build the lazy event generator."""
        self.response = response
        self.generator = stream_events(response, decoder, sentinel)

    def __iter__(self):
        """The stream is its own iterator."""
        return self

    def __next__(self):
        """Pull the next decoded event from the underlying generator."""
        return next(self.generator)

    def __enter__(self):
        """Support ``with EventStream(...) as stream:`` usage."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Close the HTTP response when the ``with`` block exits."""
        self.response.close()
|
43
|
+
|
44
|
+
|
45
|
+
class EventStreamAsync(Generic[T]):
    """Asynchronous iterator over decoded server-sent events.

    Async counterpart of ``EventStream``: wraps an ``httpx.Response`` and
    lazily yields events produced by ``stream_events_async``.  Use as an
    async context manager so the response is always closed.
    """

    response: httpx.Response
    generator: AsyncGenerator[T, None]

    def __init__(
        self,
        response: httpx.Response,
        decoder: Callable[[str], T],
        sentinel: Optional[str] = None,
    ):
        """Bind *response* and build the lazy async event generator."""
        self.response = response
        self.generator = stream_events_async(response, decoder, sentinel)

    def __aiter__(self):
        """The stream is its own async iterator."""
        return self

    async def __anext__(self):
        """Await the next decoded event from the underlying generator."""
        return await self.generator.__anext__()

    async def __aenter__(self):
        """Support ``async with EventStreamAsync(...) as stream:`` usage."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Close the HTTP response when the ``async with`` block exits."""
        await self.response.aclose()
|
69
|
+
|
70
|
+
|
71
|
+
class ServerEvent:
    """A single server-sent event (SSE) message.

    Attributes are deliberately declared as class-level defaults rather than
    set in an ``__init__``: ``_parse_event`` serializes ``event.__dict__``,
    so only the fields explicitly assigned on an instance appear in the
    decoded output — unset fields are omitted entirely.
    """

    # Last-event-ID from the "id:" field, when present.
    id: Optional[str] = None
    # Event type name from the "event:" field, when present.
    event: Optional[str] = None
    # Payload from one or more "data:" fields; may be replaced with the
    # JSON-decoded value by `_parse_event` when the payload looks like JSON.
    data: Optional[str] = None
    # Reconnection hint from the "retry:" field, parsed only when the value
    # is all digits; otherwise None.
    retry: Optional[int] = None
|
76
|
+
|
77
|
+
|
78
|
+
# Byte sequences that terminate one SSE message and begin the next
# (blank line in CRLF, LF and CR conventions).  Checked in order, so the
# longest candidate (CRLF CRLF) is tried first.
MESSAGE_BOUNDARIES = [
    b"\r\n\r\n",
    b"\n\n",
    b"\r\r",
]
|
83
|
+
|
84
|
+
|
85
|
+
async def stream_events_async(
    response: httpx.Response,
    decoder: Callable[[str], T],
    sentinel: Optional[str] = None,
) -> AsyncGenerator[T, None]:
    """Asynchronously yield decoded SSE events from *response*.

    Raw bytes are accumulated in a buffer, split on blank-line message
    boundaries, and each complete message block is handed to
    ``_parse_event``.  When a message's data payload equals *sentinel*,
    all subsequent input is discarded until the server closes the
    connection.
    """
    buffer = bytearray()
    position = 0  # index of the first unconsumed byte in `buffer`
    discard = False
    async for chunk in response.aiter_bytes():
        # We've encountered the sentinel value and should no longer process
        # incoming data. Instead we throw new data away until the server closes
        # the connection.
        if discard:
            continue

        buffer += chunk
        for i in range(position, len(buffer)):
            char = buffer[i : i + 1]
            seq: Optional[bytes] = None
            if char in [b"\r", b"\n"]:
                # A CR/LF may start a message boundary; try each candidate.
                for boundary in MESSAGE_BOUNDARIES:
                    seq = _peek_sequence(i, buffer, boundary)
                    if seq is not None:
                        break
            if seq is None:
                continue

            # Complete message found: parse everything before the boundary
            # and advance past the boundary itself.
            block = buffer[position:i]
            position = i + len(seq)
            event, discard = _parse_event(block, decoder, sentinel)
            if event is not None:
                yield event

        if position > 0:
            # Drop consumed bytes so the buffer does not grow without bound.
            buffer = buffer[position:]
            position = 0

    # Flush any trailing message that was not terminated by a boundary.
    event, discard = _parse_event(buffer, decoder, sentinel)
    if event is not None:
        yield event
|
125
|
+
|
126
|
+
|
127
|
+
def stream_events(
    response: httpx.Response,
    decoder: Callable[[str], T],
    sentinel: Optional[str] = None,
) -> Generator[T, None, None]:
    """Yield decoded SSE events from *response* (synchronous variant).

    Mirrors ``stream_events_async``: raw bytes are buffered, split on
    blank-line message boundaries, and each complete message block is
    handed to ``_parse_event``.  When a message's data payload equals
    *sentinel*, all subsequent input is discarded until the server closes
    the connection.
    """
    buffer = bytearray()
    position = 0  # index of the first unconsumed byte in `buffer`
    discard = False
    for chunk in response.iter_bytes():
        # We've encountered the sentinel value and should no longer process
        # incoming data. Instead we throw new data away until the server closes
        # the connection.
        if discard:
            continue

        buffer += chunk
        for i in range(position, len(buffer)):
            char = buffer[i : i + 1]
            seq: Optional[bytes] = None
            if char in [b"\r", b"\n"]:
                # A CR/LF may start a message boundary; try each candidate.
                for boundary in MESSAGE_BOUNDARIES:
                    seq = _peek_sequence(i, buffer, boundary)
                    if seq is not None:
                        break
            if seq is None:
                continue

            # Complete message found: parse everything before the boundary
            # and advance past the boundary itself.
            block = buffer[position:i]
            position = i + len(seq)
            event, discard = _parse_event(block, decoder, sentinel)
            if event is not None:
                yield event

        if position > 0:
            # Drop consumed bytes so the buffer does not grow without bound.
            buffer = buffer[position:]
            position = 0

    # Flush any trailing message that was not terminated by a boundary.
    event, discard = _parse_event(buffer, decoder, sentinel)
    if event is not None:
        yield event
|
167
|
+
|
168
|
+
|
169
|
+
def _parse_event(
    raw: bytearray, decoder: Callable[[str], T], sentinel: Optional[str] = None
) -> Tuple[Optional[T], bool]:
    """Parse one raw SSE message block into a decoded event.

    Returns ``(event, discard)``: *event* is the decoder's output, or
    ``None`` when the block contained no publishable field; *discard* is
    True only when the data payload equals *sentinel*, telling the caller
    to stop processing further input.
    """
    block = raw.decode()
    lines = re.split(r"\r?\n|\r", block)
    publish = False
    event = ServerEvent()
    data = ""
    for line in lines:
        if not line:
            continue

        delim = line.find(":")
        # Lines with no "field:" prefix — including comment lines that start
        # with ":" — are ignored.
        if delim <= 0:
            continue

        field = line[0:delim]
        value = line[delim + 1 :] if delim < len(line) - 1 else ""
        # A single leading space after the colon is stripped.
        if len(value) and value[0] == " ":
            value = value[1:]

        if field == "event":
            event.event = value
            publish = True
        elif field == "data":
            # Multi-line data fields are accumulated newline-separated.
            data += value + "\n"
            publish = True
        elif field == "id":
            event.id = value
            publish = True
        elif field == "retry":
            # Non-numeric retry values are dropped rather than raising.
            event.retry = int(value) if value.isdigit() else None
            publish = True

    # Sentinel comparison happens before the trailing newline is stripped,
    # hence the explicit "\n" in the comparison string.
    if sentinel and data == f"{sentinel}\n":
        return None, True

    if data:
        data = data[:-1]  # drop the trailing newline added in the loop above
        event.data = data

        # Best effort: replace the raw string with parsed JSON when the
        # payload looks like a JSON document or primitive.
        data_is_primitive = (
            data.isnumeric() or data == "true" or data == "false" or data == "null"
        )
        data_is_json = (
            data.startswith("{") or data.startswith("[") or data.startswith('"')
        )

        if data_is_primitive or data_is_json:
            try:
                event.data = json.loads(data)
            except Exception:
                pass

    out = None
    if publish:
        # Only fields explicitly set above exist in event.__dict__ (the
        # class-level defaults are not copied), so unset fields are omitted
        # from the serialized payload handed to the decoder.
        out = decoder(json.dumps(event.__dict__))

    return out, False
|
228
|
+
|
229
|
+
|
230
|
+
def _peek_sequence(position: int, buffer: bytearray, sequence: bytes):
|
231
|
+
if len(sequence) > (len(buffer) - position):
|
232
|
+
return None
|
233
|
+
|
234
|
+
for i, seq in enumerate(sequence):
|
235
|
+
if buffer[position + i] != seq:
|
236
|
+
return None
|
237
|
+
|
238
|
+
return sequence
|
@@ -0,0 +1,202 @@
|
|
1
|
+
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
|
2
|
+
|
3
|
+
from typing import (
|
4
|
+
Any,
|
5
|
+
Dict,
|
6
|
+
get_type_hints,
|
7
|
+
List,
|
8
|
+
Tuple,
|
9
|
+
)
|
10
|
+
from pydantic import BaseModel
|
11
|
+
from pydantic.fields import FieldInfo
|
12
|
+
|
13
|
+
from .serializers import marshal_json
|
14
|
+
|
15
|
+
from .metadata import (
|
16
|
+
FormMetadata,
|
17
|
+
MultipartFormMetadata,
|
18
|
+
find_field_metadata,
|
19
|
+
)
|
20
|
+
from .values import _is_set, _val_to_string
|
21
|
+
|
22
|
+
|
23
|
+
def _populate_form(
    field_name: str,
    explode: bool,
    obj: Any,
    delimiter: str,
    form: Dict[str, List[str]],
):
    """Serialize *obj* into *form* under *field_name* (form-style encoding).

    Handles pydantic models, mappings, lists and scalars.  With ``explode``
    each member becomes its own form field; otherwise members are joined
    with *delimiter* into a single value.  Unset values are skipped.

    Returns the (mutated) *form* mapping.
    """
    if not _is_set(obj):
        return form

    if isinstance(obj, BaseModel):
        items = []

        obj_fields: Dict[str, FieldInfo] = obj.__class__.model_fields
        for name in obj_fields:
            obj_field = obj_fields[name]
            # Prefer the wire alias over the attribute name when present.
            obj_field_name = obj_field.alias if obj_field.alias is not None else name
            if obj_field_name == "":
                continue

            val = getattr(obj, name)
            if not _is_set(val):
                continue

            if explode:
                form[obj_field_name] = [_val_to_string(val)]
            else:
                items.append(f"{obj_field_name}{delimiter}{_val_to_string(val)}")

        if len(items) > 0:
            form[field_name] = [delimiter.join(items)]
    elif isinstance(obj, Dict):
        items = []
        for key, value in obj.items():
            if not _is_set(value):
                continue

            if explode:
                form[key] = [_val_to_string(value)]
            else:
                items.append(f"{key}{delimiter}{_val_to_string(value)}")

        if len(items) > 0:
            form[field_name] = [delimiter.join(items)]
    elif isinstance(obj, List):
        items = []

        for value in obj:
            if not _is_set(value):
                continue

            if explode:
                # setdefault replaces the "if not field_name in form" (E713)
                # membership test plus separate initialization.
                form.setdefault(field_name, []).append(_val_to_string(value))
            else:
                items.append(_val_to_string(value))

        if len(items) > 0:
            # items already contains strings, so no per-item str() is needed.
            form[field_name] = [delimiter.join(items)]
    else:
        form[field_name] = [_val_to_string(obj)]

    return form
|
87
|
+
|
88
|
+
|
89
|
+
def serialize_multipart_form(
    media_type: str, request: Any
) -> Tuple[str, Dict[str, Any], Dict[str, Any]]:
    """Serialize a pydantic *request* model into multipart/form-data parts.

    Returns ``(media_type, form, files)``.

    Raises:
        TypeError: if *request* is not a pydantic ``BaseModel``.
        ValueError: if a file field lacks a file name or content.
    """
    form: Dict[str, Any] = {}
    files: Dict[str, Any] = {}

    if not isinstance(request, BaseModel):
        raise TypeError("invalid request body type")

    request_fields: Dict[str, FieldInfo] = request.__class__.model_fields
    request_field_types = get_type_hints(request.__class__)

    for name in request_fields:
        field = request_fields[name]

        val = getattr(request, name)
        if not _is_set(val):
            continue

        # Only fields carrying multipart metadata participate.
        field_metadata = find_field_metadata(field, MultipartFormMetadata)
        if not field_metadata:
            continue

        f_name = field.alias if field.alias else name

        if field_metadata.file:
            # `val` is a model describing the file; scan its fields for the
            # content, the content type and the file name.
            file_fields: Dict[str, FieldInfo] = val.__class__.model_fields

            file_name = ""
            content = None
            content_type = None

            for file_field_name in file_fields:
                file_field = file_fields[file_field_name]

                file_metadata = find_field_metadata(file_field, MultipartFormMetadata)
                if file_metadata is None:
                    continue

                if file_metadata.content:
                    content = getattr(val, file_field_name, None)
                elif file_field_name == "content_type":
                    content_type = getattr(val, file_field_name, None)
                else:
                    # NOTE(review): any other metadata-annotated field is
                    # assumed to hold the file name — confirm against the
                    # generated file models.
                    file_name = getattr(val, file_field_name)

            if file_name == "" or content is None:
                raise ValueError("invalid multipart/form-data file")

            if content_type is not None:
                files[f_name] = (file_name, content, content_type)
            else:
                files[f_name] = (file_name, content)
        elif field_metadata.json:
            # JSON parts are sent as an unnamed file part with an explicit
            # application/json content type.
            files[f_name] = (
                None,
                marshal_json(val, request_field_types[name]),
                "application/json",
            )
        else:
            if isinstance(val, List):
                values = []

                for value in val:
                    if not _is_set(value):
                        continue
                    values.append(_val_to_string(value))

                # Repeated values use the "name[]" array convention.
                form[f_name + "[]"] = values
            else:
                form[f_name] = _val_to_string(val)
    return media_type, form, files
|
161
|
+
|
162
|
+
|
163
|
+
def serialize_form_data(data: Any) -> Dict[str, Any]:
    """Serialize *data* (a pydantic model or a mapping) into form fields.

    Raises:
        ValueError: for a model field whose form style is not "form".
        TypeError: when *data* is neither a ``BaseModel`` nor a ``Dict``.
    """
    form: Dict[str, List[str]] = {}

    if isinstance(data, BaseModel):
        model_fields: Dict[str, FieldInfo] = data.__class__.model_fields
        hints = get_type_hints(data.__class__)
        for name, field in model_fields.items():
            val = getattr(data, name)
            if not _is_set(val):
                continue

            # Fields without form metadata are not serialized.
            metadata = find_field_metadata(field, FormMetadata)
            if metadata is None:
                continue

            f_name = name if field.alias is None else field.alias

            if metadata.json:
                form[f_name] = [marshal_json(val, hints[name])]
            elif metadata.style == "form":
                _populate_form(
                    f_name,
                    metadata.explode,
                    val,
                    ",",
                    form,
                )
            else:
                raise ValueError(f"Invalid form style for field {name}")
    elif isinstance(data, Dict):
        for key, value in data.items():
            if _is_set(value):
                form[key] = [_val_to_string(value)]
    else:
        raise TypeError(f"Invalid request body type {type(data)} for form data")

    return form
|
@@ -0,0 +1,136 @@
|
|
1
|
+
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
|
2
|
+
|
3
|
+
from typing import (
|
4
|
+
Any,
|
5
|
+
Dict,
|
6
|
+
List,
|
7
|
+
Optional,
|
8
|
+
)
|
9
|
+
from httpx import Headers
|
10
|
+
from pydantic import BaseModel
|
11
|
+
from pydantic.fields import FieldInfo
|
12
|
+
|
13
|
+
from .metadata import (
|
14
|
+
HeaderMetadata,
|
15
|
+
find_field_metadata,
|
16
|
+
)
|
17
|
+
|
18
|
+
from .values import _is_set, _populate_from_globals, _val_to_string
|
19
|
+
|
20
|
+
|
21
|
+
def get_headers(headers_params: Any, gbls: Optional[Any] = None) -> Dict[str, str]:
    """Build the outgoing header map from *headers_params* and globals.

    Parameters take precedence; globals only fill in header fields that the
    parameters did not already provide.
    """
    headers: Dict[str, str] = {}

    populated: List[str] = []
    if _is_set(headers_params):
        populated = _populate_headers(headers_params, gbls, headers, [])
    if _is_set(gbls):
        # Skip the fields already satisfied from globals above.
        _populate_headers(gbls, None, headers, populated)

    return headers
|
31
|
+
|
32
|
+
|
33
|
+
def _populate_headers(
    headers_params: Any,
    gbls: Any,
    header_values: Dict[str, str],
    skip_fields: List[str],
) -> List[str]:
    """Serialize header fields of *headers_params* into *header_values*.

    Fields listed in *skip_fields* are ignored.  Returns the names of
    fields whose values came from *gbls*, so callers can avoid populating
    them a second time.
    """
    filled_from_globals: List[str] = []

    # Anything that is not a pydantic model carries no header metadata.
    if not isinstance(headers_params, BaseModel):
        return filled_from_globals

    fields: Dict[str, FieldInfo] = headers_params.__class__.model_fields
    for name, field in fields.items():
        if name in skip_fields:
            continue

        metadata = find_field_metadata(field, HeaderMetadata)
        if metadata is None:
            continue

        f_name = name if field.alias is None else field.alias

        value, global_found = _populate_from_globals(
            name, getattr(headers_params, name), HeaderMetadata, gbls
        )
        if global_found:
            filled_from_globals.append(name)
        serialized = _serialize_header(metadata.explode, value)

        # Empty serializations are dropped rather than sent as blank headers.
        if serialized != "":
            header_values[f_name] = serialized

    return filled_from_globals
|
67
|
+
|
68
|
+
|
69
|
+
def _serialize_header(explode: bool, obj: Any) -> str:
    """Serialize *obj* into a single header value string.

    Models and dicts become comma-joined ``key=value`` pairs when *explode*
    is true, otherwise alternating ``key,value`` entries; lists become
    comma-joined values; scalars are stringified.  Unset values — and
    models/dicts/lists with no serializable members — produce "".
    """
    if not _is_set(obj):
        return ""

    if isinstance(obj, BaseModel):
        items = []
        obj_fields: Dict[str, FieldInfo] = obj.__class__.model_fields
        for name in obj_fields:
            obj_field = obj_fields[name]
            obj_param_metadata = find_field_metadata(obj_field, HeaderMetadata)

            # Fields without header metadata are not serialized.
            if not obj_param_metadata:
                continue

            # Prefer the wire alias over the attribute name when present.
            f_name = obj_field.alias if obj_field.alias is not None else name

            val = getattr(obj, name)
            if not _is_set(val):
                continue

            if explode:
                items.append(f"{f_name}={_val_to_string(val)}")
            else:
                items.append(f_name)
                items.append(_val_to_string(val))

        if len(items) > 0:
            return ",".join(items)
        # NOTE: a model with zero serializable fields falls through to "".
    elif isinstance(obj, Dict):
        items = []

        for key, value in obj.items():
            if not _is_set(value):
                continue

            if explode:
                items.append(f"{key}={_val_to_string(value)}")
            else:
                items.append(key)
                items.append(_val_to_string(value))

        if len(items) > 0:
            return ",".join([str(item) for item in items])
    elif isinstance(obj, List):
        items = []

        for value in obj:
            if not _is_set(value):
                continue

            items.append(_val_to_string(value))

        if len(items) > 0:
            return ",".join(items)
    elif _is_set(obj):
        # Scalar fallback: plain stringification.
        return f"{_val_to_string(obj)}"

    return ""
|
127
|
+
|
128
|
+
|
129
|
+
def get_response_headers(headers: Headers) -> Dict[str, List[str]]:
    """Collect response headers into a name -> list-of-values mapping.

    Values for a repeated header name accumulate in iteration order
    (presumably ``Headers.items()`` yields one pair per value — verify
    against the httpx documentation).
    """
    res: Dict[str, List[str]] = {}
    for k, v in headers.items():
        # setdefault replaces the "if not k in res" (E713) membership test
        # plus separate list initialization.
        res.setdefault(k, []).append(v)
    return res
|
@@ -0,0 +1,27 @@
|
|
1
|
+
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
|
2
|
+
|
3
|
+
import httpx
|
4
|
+
import logging
|
5
|
+
import os
|
6
|
+
from typing import Any, Protocol
|
7
|
+
|
8
|
+
|
9
|
+
class Logger(Protocol):
    """Structural (duck-typed) interface for the SDK's debug logger.

    Any object exposing a compatible ``debug`` method satisfies this
    protocol — both ``logging.Logger`` and ``NoOpLogger`` qualify.
    """

    def debug(self, msg: str, *args: Any, **kwargs: Any) -> None:
        pass
|
12
|
+
|
13
|
+
|
14
|
+
class NoOpLogger:
    """Logger implementation that silently discards every message."""

    def debug(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """Drop *msg* and all arguments; always returns None."""
        return None
|
17
|
+
|
18
|
+
|
19
|
+
def get_body_content(req: httpx.Request) -> str:
    """Render the request body for debug logging.

    A request whose content has not been materialized yet (no ``_content``
    attribute) is reported as a streaming body instead of being consumed.
    """
    if hasattr(req, "_content"):
        return str(req.content)
    return "<streaming body>"
|
21
|
+
|
22
|
+
|
23
|
+
def get_default_logger() -> Logger:
    """Return the SDK's logger: a real logger when debugging, else a no-op.

    Debug logging is enabled by setting the ZILLOWRAPIDAPICLIENT_DEBUG
    environment variable to any non-empty value.
    """
    if not os.getenv("ZILLOWRAPIDAPICLIENT_DEBUG"):
        return NoOpLogger()

    logging.basicConfig(level=logging.DEBUG)
    return logging.getLogger("zillow_rapidapi_client")
|