ddapm-test-agent 1.37.0__py3-none-any.whl → 1.38.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddapm_test_agent/vcr_proxy.py +306 -58
- ddapm_test_agent/web.py +2 -2
- {ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/METADATA +2 -2
- {ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/RECORD +9 -9
- {ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/WHEEL +0 -0
- {ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/entry_points.txt +0 -0
- {ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/licenses/LICENSE.BSD3 +0 -0
- {ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/licenses/LICENSE.apache2 +0 -0
- {ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/top_level.txt +0 -0
ddapm_test_agent/vcr_proxy.py
CHANGED

@@ -1,4 +1,7 @@
 import asyncio
+import base64
+from dataclasses import dataclass
+from glob import glob
 import hashlib
 import json
 import logging
@@ -6,29 +9,105 @@ import os
 import re
 from typing import Any
 from typing import Dict
+from typing import List
 from typing import Optional
+from typing import Union
+from typing import cast
 from urllib.parse import urljoin
 
 from aiohttp.web import Request
 from aiohttp.web import Response
 import requests
 from requests_aws4auth import AWS4Auth
-import vcr
+import yaml
 
 
 logger = logging.getLogger(__name__)
 
 
+@dataclass
+class CassetteDataRequest:
+    """Represents the request portion of a cassette."""
+
+    method: str
+    url: str
+    headers: Dict[str, str]
+    body: str
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "CassetteDataRequest":
+        """Create from a dictionary."""
+        return cls(
+            method=data["method"],
+            url=data["url"],
+            headers=data["headers"],
+            body=data["body"],
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to a dictionary for JSON serialization."""
+        return {
+            "method": self.method,
+            "url": self.url,
+            "headers": self.headers,
+            "body": self.body,
+        }
+
+
+@dataclass
+class CassetteDataResponse:
+    """Represents the response portion of a cassette."""
+
+    status: Dict[str, Any]  # {"code": int, "message": str}
+    headers: Dict[str, str]
+    body: str
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "CassetteDataResponse":
+        """Create from a dictionary."""
+        return cls(
+            status=data["status"],
+            headers=data["headers"],
+            body=data["body"],
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to a dictionary for JSON serialization."""
+        return {
+            "status": self.status,
+            "headers": self.headers,
+            "body": self.body,
+        }
+
+
+@dataclass
+class CassetteData:
+    """Represents a VCR cassette with request and response data."""
+
+    request: CassetteDataRequest
+    response: CassetteDataResponse
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "CassetteData":
+        """Create from a dictionary (e.g., loaded from JSON)."""
+        return cls(
+            request=CassetteDataRequest.from_dict(data["request"]),
+            response=CassetteDataResponse.from_dict(data["response"]),
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to a dictionary for JSON serialization."""
+        return {
+            "request": self.request.to_dict(),
+            "response": self.response.to_dict(),
+        }
+
+
 # Used for AWS signature recalculation for aws services initial proxying
 AWS_REGION = os.environ.get("AWS_REGION", "us-east-1")
 AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
 
 
-def url_path_join(base_url: str, path: str) -> str:
-    """Join a base URL with a path, handling slashes automatically."""
-    return urljoin(base_url.rstrip("/") + "/", path.lstrip("/"))
-
-
 AWS_SERVICES = {
     "bedrock-runtime": "bedrock",
 }
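The three dataclasses above define the new on-disk cassette schema, and their from_dict/to_dict methods are exact inverses, so a cassette JSON round-trips losslessly. A minimal sketch of that round-trip (the payload values are invented, and the example assumes the package is importable):

from ddapm_test_agent.vcr_proxy import CassetteData

raw = {
    "request": {
        "method": "POST",
        "url": "https://api.example.com/v1/generate",  # hypothetical provider URL
        "headers": {"content-type": "application/json"},
        "body": '{"prompt": "hi"}',
    },
    "response": {
        "status": {"code": 200, "message": "OK"},
        "headers": {"content-type": "application/json"},
        "body": '{"output": "hello"}',
    },
}

cassette = CassetteData.from_dict(raw)
assert cassette.to_dict() == raw  # lossless round-trip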
@@ -72,11 +151,16 @@ NORMALIZERS = [
 ]
 
 
+def _url_path_join(base_url: str, path: str) -> str:
+    """Join a base URL with a path, handling slashes automatically."""
+    return urljoin(base_url.rstrip("/") + "/", path.lstrip("/"))
+
+
 def _file_safe_string(s: str) -> str:
     return "".join(c if c.isalnum() or c in ".-" else "_" for c in s)
 
 
-def get_custom_vcr_providers(vcr_provider_map: str) -> Dict[str, str]:
+def _get_custom_vcr_providers(vcr_provider_map: str) -> Dict[str, str]:
     return dict(
         [
             vcr_provider_map.strip().split("=", 1)
@@ -86,7 +170,7 @@ def get_custom_vcr_providers(vcr_provider_map: str) -> Dict[str, str]:
     )
 
 
-def normalize_multipart_body(body: bytes) -> str:
+def _normalize_multipart_body(body: bytes) -> str:
     if not body:
         return ""
 
@@ -110,7 +194,41 @@ def normalize_multipart_body(body: bytes) -> str:
     return f"[binary_data_{hex_digest}]"
 
 
-def parse_authorization_header(auth_header: str) -> Dict[str, str]:
+def _decode_body(body: bytes) -> str:
+    """Decode body (request or response), handling binary data gracefully."""
+    if not body:
+        return ""
+
+    # Check for null bytes - strong indicator of binary data (e.g., event streams, protobuf)
+    if b"\x00" in body:
+        return "base64:" + base64.b64encode(body).decode("ascii")
+
+    try:
+        # Try UTF-8 decode - if successful, it's text
+        return body.decode("utf-8")
+    except UnicodeDecodeError:
+        # If UTF-8 fails, treat as binary
+        return "base64:" + base64.b64encode(body).decode("ascii")
+
+
+def _encode_body(body: str) -> bytes:
+    """Convert cassette body string back to bytes, handling base64-encoded data."""
+    if not body:
+        return b""
+
+    # Check for base64 marker first (for binary data that was base64-encoded)
+    if body.startswith("base64:"):
+        return base64.b64decode(body[7:])
+
+    try:
+        # Try to encode as UTF-8 (most common case)
+        return body.encode("utf-8")
+    except UnicodeEncodeError:
+        # If all else fails, encode as latin-1
+        return body.encode("latin-1")
+
+
+def _parse_authorization_header(auth_header: str) -> Dict[str, str]:
     """Parse AWS Authorization header to extract components"""
     if not auth_header.startswith("AWS4-HMAC-SHA256 "):
         return {}
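_decode_body and _encode_body are designed as a symmetric pair: text bodies stay human-readable in the cassette JSON, while binary bodies get a "base64:" prefix. A small sketch of the round-trip (illustrative values only):

from ddapm_test_agent.vcr_proxy import _decode_body, _encode_body

text = b'{"ok": true}'
assert _decode_body(text) == '{"ok": true}'     # valid UTF-8 stays as text
assert _encode_body(_decode_body(text)) == text  # lossless round-trip

binary = b"\x00\x01\x02"
encoded = _decode_body(binary)
assert encoded.startswith("base64:")             # null byte forces base64 encoding
assert _encode_body(encoded) == binary           # decoded back to the original bytes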
@@ -125,20 +243,8 @@ def parse_authorization_header(auth_header: str) -> Dict[str, str]:
     return parsed
 
 
-def get_vcr(provider: str, cassette_dir: str, vcr_ignore_headers: str) -> vcr.VCR:
-
-    extra_ignore_headers = vcr_ignore_headers.split(",")
-
-    return vcr.VCR(
-        cassette_library_dir=cassette_dir,
-        record_mode="once",
-        match_on=["path", "method"],
-        filter_headers=CASSETTE_FILTER_HEADERS + extra_ignore_headers,
-    )
-
-
-def generate_cassette_name(path: str, method: str, body: bytes, vcr_cassette_prefix: Optional[str]) -> str:
-    decoded_body = normalize_multipart_body(body) if body else ""
+def _generate_cassette_name(path: str, method: str, body: bytes, vcr_cassette_prefix: Optional[str]) -> str:
+    decoded_body = _normalize_multipart_body(body) if body else ""
     try:
         parsed_body = json.loads(decoded_body) if decoded_body else {}
     except json.JSONDecodeError:
@@ -158,11 +264,180 @@ def generate_cassette_name(path: str, method: str, body: bytes, vcr_cassette_prefix: Optional[str]) -> str:
     )
 
 
+def _convert_vcr_cassette_to_custom_format(
+    cassette_file_path: str,
+    request_kwargs: Dict[str, Any],
+    vcr_ignore_headers: str,
+) -> CassetteData:
+    """Convert a VCR YAML cassette to our custom JSON format."""
+    cassette_file_path_yaml = f"{cassette_file_path}.yaml"
+    with open(cassette_file_path_yaml, "r") as f:
+        cassette_data = yaml.load(f, Loader=yaml.UnsafeLoader)
+
+    interaction = cast(Dict[str, Any], cassette_data["interactions"][0])
+
+    cassette = _write_cassette_file(cassette_file_path, request_kwargs, interaction["response"], vcr_ignore_headers)
+
+    logger.warning(f"Removing legacy VCR cassette file {cassette_file_path_yaml}.")
+    os.remove(cassette_file_path_yaml)
+
+    return cassette
+
+
+def _normalize_header_value(value: Any) -> str:
+    """Normalize header value to a string (handles list values)."""
+    if isinstance(value, list):
+        return str(value[0]) if value else ""
+    return str(value)
+
+
+def _filter_headers(headers: Dict[str, Any], ignore_headers: List[str]) -> Dict[str, str]:
+    """Filter headers and normalize their values."""
+    return {key: _normalize_header_value(value) for key, value in headers.items() if key.lower() not in ignore_headers}
+
+
+def _create_cassette_from_requests_response(
+    request_kwargs: Dict[str, Any],
+    response: requests.Response,
+    ignore_headers: List[str],
+) -> CassetteData:
+    """Create cassette data from a requests.Response object."""
+    logger.info(f"Creating cassette data from requests.Response object: {response.content!r}")
+    return CassetteData(
+        request=CassetteDataRequest(
+            method=request_kwargs["method"],
+            url=request_kwargs["url"],
+            headers=_filter_headers(request_kwargs["headers"], ignore_headers),
+            body=_decode_body(request_kwargs["data"]) if request_kwargs["data"] else "",
+        ),
+        response=CassetteDataResponse(
+            status={
+                "code": response.status_code,
+                "message": response.reason or "",
+            },
+            headers=_filter_headers(dict(response.headers), ignore_headers),
+            body=_decode_body(response.content) if response.content else "",
+        ),
+    )
+
+
+def _create_cassette_from_dict(
+    request_kwargs: Dict[str, Any],
+    response_dict: Dict[str, Any],
+    ignore_headers: List[str],
+) -> CassetteData:
+    """Create cassette data from a dictionary (e.g., from VCR YAML)."""
+    body_data = response_dict["body"]["string"]
+    if isinstance(body_data, bytes):
+        body_str = _decode_body(body_data)
+    else:
+        body_str = body_data
+
+    return CassetteData(
+        request=CassetteDataRequest(
+            method=request_kwargs["method"],
+            url=request_kwargs["url"],
+            headers=_filter_headers(request_kwargs["headers"], ignore_headers),
+            body=_decode_body(request_kwargs["data"]) if request_kwargs["data"] else "",
+        ),
+        response=CassetteDataResponse(
+            status={
+                "code": response_dict["status"]["code"],
+                "message": response_dict["status"]["message"],
+            },
+            headers=_filter_headers(response_dict["headers"], ignore_headers),
+            body=body_str,
+        ),
+    )
+
+
+def _write_cassette_file(
+    cassette_file_path: str,
+    request_kwargs: Dict[str, Any],
+    response: Union[requests.Response, Dict[str, Any]],
+    vcr_ignore_headers: str,
+) -> CassetteData:
+    """Write cassette data to a JSON file."""
+    cassette_file_path_json = f"{cassette_file_path}.json"
+    logger.info(f"Writing cassette file to {cassette_file_path_json}")
+
+    cassette_dir = os.path.dirname(cassette_file_path_json)
+    os.makedirs(cassette_dir, exist_ok=True)
+
+    ignore_headers_list = [
+        header.lower() for header in CASSETTE_FILTER_HEADERS + vcr_ignore_headers.split(",") if header
+    ]
+
+    if isinstance(response, requests.Response):
+        cassette = _create_cassette_from_requests_response(request_kwargs, response, ignore_headers_list)
+    else:
+        # conversion of legacy VCR cassette to JSON format
+        cassette = _create_cassette_from_dict(request_kwargs, response, ignore_headers_list)
+
+    with open(cassette_file_path_json, "w") as f:
+        json.dump(cassette.to_dict(), f, indent=2)
+
+    return cassette
+
+
+async def _request(
+    cassette_file_path: str, cassette_exists: bool, request_kwargs: Dict[str, Any], vcr_ignore_headers: str
+) -> Response:
+    """
+    Load a cassette from file if it exists, otherwise make a request and save the response.
+
+    If the cassette was created with the VCR package (YAML format), convert it to JSON format.
+    """
+    logger.info(f"Making a request to {request_kwargs['url']} with method {request_kwargs['method']}")
+
+    cassette: Optional[CassetteData] = None
+
+    if cassette_exists:
+        logger.info(f"Cassette file exists at {cassette_file_path}")
+        cassette_files = glob(f"{cassette_file_path}.*")
+        if not cassette_files:
+            raise FileNotFoundError(f"Expected cassette file at {cassette_file_path}.*")
+
+        file_extension = os.path.splitext(cassette_files[0])[1]
+
+        if file_extension == ".yaml":  # TODO(sabrenner): in a breaking change, remove this
+            logger.warning(
+                "Converting legacy VCR cassette to JSON format. This will not be supported in ddapm-test-agent==2.0.0"
+            )
+            cassette = _convert_vcr_cassette_to_custom_format(cassette_file_path, request_kwargs, vcr_ignore_headers)
+        elif file_extension == ".json":
+            cassette_file_path_json = f"{cassette_file_path}.json"
+            with open(cassette_file_path_json, "r") as f:
+                cassette = CassetteData.from_dict(json.load(f))
+        else:
+            raise ValueError(f"Unsupported cassette file extension: {file_extension}")
+    else:
+        logger.info(f"Cassette file does not exist at {cassette_file_path}, making a request to the provider")
+        provider_response = await asyncio.to_thread(lambda: requests.request(**request_kwargs))
+        cassette = _write_cassette_file(cassette_file_path, request_kwargs, provider_response, vcr_ignore_headers)
+
+    # Build response from cassette data
+    response_body_str = cassette.response.body
+    response_body = _encode_body(response_body_str) if isinstance(response_body_str, str) else b""
+
+    response = Response(
+        body=response_body,
+        status=cassette.response.status["code"],
+    )
+
+    skip_headers = {"content-length", "transfer-encoding", "content-encoding", "connection"}
+    for key, value in cassette.response.headers.items():
+        if key.lower() not in skip_headers:
+            response.headers[key] = value
+
+    return response
+
+
 async def proxy_request(
     request: Request, vcr_cassettes_directory: str, vcr_ci_mode: bool, vcr_provider_map: str, vcr_ignore_headers: str
 ) -> Response:
     provider_base_urls = PROVIDER_BASE_URLS.copy()
-    provider_base_urls.update(get_custom_vcr_providers(vcr_provider_map))
+    provider_base_urls.update(_get_custom_vcr_providers(vcr_provider_map))
 
     path = request.match_info["path"]
     if request.query_string:
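Taken together, _request implements a record-once/replay cache keyed by the cassette path: the first miss hits the provider and writes <path>.json; every later call rebuilds the HTTP response from that file. A hedged, replay-only sketch that pre-writes a cassette by hand so no network is needed (all paths and values are invented):

import asyncio
import json
import os

from ddapm_test_agent import vcr_proxy

path = "/tmp/cassettes/demo/replay_example"  # hypothetical cassette path
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(f"{path}.json", "w") as f:
    json.dump(
        {
            "request": {"method": "GET", "url": "https://example.com/", "headers": {}, "body": ""},
            "response": {"status": {"code": 200, "message": "OK"}, "headers": {}, "body": '{"ok": true}'},
        },
        f,
    )

async def main() -> None:
    # cassette_exists=True: the response is rebuilt entirely from the JSON file.
    response = await vcr_proxy._request(path, True, {"method": "GET", "url": "https://example.com/"}, "")
    print(response.status, response.body)

asyncio.run(main())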
@@ -179,18 +454,17 @@ async def proxy_request(
     body_bytes = await request.read()
 
     vcr_cassette_prefix = request.pop("vcr_cassette_prefix", None)
-    cassette_name = generate_cassette_name(path, request.method, body_bytes, vcr_cassette_prefix)
-    cassette_file_name = f"{cassette_name}.yaml"
-    cassette_file_path = os.path.join(vcr_cassettes_directory, provider, cassette_file_name)
-    cassette_exists = os.path.exists(cassette_file_path)
+    cassette_name = _generate_cassette_name(path, request.method, body_bytes, vcr_cassette_prefix)
+    cassette_file_path = os.path.join(vcr_cassettes_directory, provider, cassette_name)
+    cassette_exists = len(glob(f"{cassette_file_path}.*")) > 0
 
     if vcr_ci_mode and not cassette_exists:
         return Response(
-            body=f"Cassette {cassette_file_name} not found while running in CI mode. Please generate the cassette locally and commit it.",
+            body=f"Cassette {cassette_name} not found while running in CI mode. Please generate the cassette locally and commit it.",
             status=500,
         )
 
-    target_url = url_path_join(provider_base_urls[provider], remaining_path)
+    target_url = _url_path_join(provider_base_urls[provider], remaining_path)
     headers = {key: value for key, value in request.headers.items() if key != "Host"}
 
     request_kwargs: Dict[str, Any] = {
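The reason the existence check switched to glob: the cassette path is now extension-less, so the proxy must treat either a legacy .yaml cassette or a new .json one as a hit. A minimal sketch of the check (the path is invented):

from glob import glob

cassette_file_path = "/tmp/cassettes/openai/completion_2f3a"   # hypothetical
cassette_exists = len(glob(f"{cassette_file_path}.*")) > 0     # True for .yaml or .json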
@@ -211,36 +485,10 @@ async def proxy_request(
        )
 
        auth_header = request.headers.get("Authorization", "")
-        auth_parts = parse_authorization_header(auth_header)
+        auth_parts = _parse_authorization_header(auth_header)
        aws_access_key = auth_parts.get("Credential", "").split("/")[0]
 
        auth = AWS4Auth(aws_access_key, AWS_SECRET_ACCESS_KEY, AWS_REGION, AWS_SERVICES[provider])
        request_kwargs["auth"] = auth
 
-    def _make_request() -> requests.Response:
-        with get_vcr(provider, vcr_cassettes_directory, vcr_ignore_headers).use_cassette(cassette_file_name):
-            return requests.request(**request_kwargs)
-
-    provider_response = await asyncio.to_thread(_make_request)
-
-    # Extract content type without charset
-    content_type = provider_response.headers.get("content-type", "")
-    if ";" in content_type:
-        content_type = content_type.split(";")[0].strip()
-
-    response = Response(
-        body=provider_response.content,
-        status=provider_response.status_code,
-        content_type=content_type,
-    )
-
-    for key, value in provider_response.headers.items():
-        if key.lower() not in (
-            "content-length",
-            "transfer-encoding",
-            "content-encoding",
-            "connection",
-        ):
-            response.headers[key] = value
-
-    return response
+    return await _request(cassette_file_path, cassette_exists, request_kwargs, vcr_ignore_headers)

ddapm_test_agent/web.py
CHANGED

@@ -797,7 +797,7 @@ class WebUI:
 
         # Get flares for all other session tokens
         session_tokens = set()
-        for req_data in request_storage.get_all_requests():
+        for req_data in request_storage.get_all_requests() if request_storage is not None else []:
             token = req_data.get("headers", {}).get("X-Datadog-Test-Session-Token")
             if token:
                 session_tokens.add(token)
@@ -934,7 +934,7 @@ class WebUI:
 
         # Get flares for all other session tokens
         session_tokens = set()
-        for req_data in request_storage.get_all_requests():
+        for req_data in request_storage.get_all_requests() if request_storage is not None else []:
             token = req_data.get("headers", {}).get("X-Datadog-Test-Session-Token")
             if token:
                 session_tokens.add(token)
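The guard added in both hunks is a conditional expression sitting directly in the for statement's iterable slot, i.e. it parses as "for req_data in (A if cond else [])". A self-contained illustration (the None value stands in for an unset storage backend):

request_storage = None  # hypothetical stand-in for an unconfigured backend

for req_data in request_storage.get_all_requests() if request_storage is not None else []:
    print(req_data)  # loop body is skipped entirely when request_storage is None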
{ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddapm-test-agent
-Version: 1.37.0
+Version: 1.38.0
 Summary: Test agent for Datadog APM client libraries
 Home-page: https://github.com/Datadog/dd-apm-test-agent
 Author: Kyle Verhoog
@@ -20,9 +20,9 @@ Requires-Dist: msgpack
 Requires-Dist: requests
 Requires-Dist: typing_extensions
 Requires-Dist: yarl
-Requires-Dist: vcrpy
 Requires-Dist: requests-aws4auth
 Requires-Dist: jinja2>=3.0.0
+Requires-Dist: pyyaml
 Requires-Dist: opentelemetry-proto<1.37.0,>1.33.0
 Requires-Dist: protobuf>=3.19.0
 Requires-Dist: grpcio<2.0,>=1.66.2
{ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/RECORD
CHANGED

@@ -16,8 +16,8 @@ ddapm_test_agent/trace_snapshot.py,sha256=vcz9uCgtpnInKl32nq1n62shhsVdMQPzOWfV3-
 ddapm_test_agent/tracerflare.py,sha256=uoSjhPCOKZflgJn5JLv1Unh4gUdAR1-YbC9_1n1iH9w,954
 ddapm_test_agent/tracestats.py,sha256=q_WQZnh2kXSSN3fRIBe_0jMYCBQHcaS3fZmJTge4lWc,2073
 ddapm_test_agent/tracestats_snapshot.py,sha256=VsB6MVnHPjPWHVWnnDdCXJcVKL_izKXEf9lvJ0qbjNQ,3609
-ddapm_test_agent/vcr_proxy.py,sha256=
-ddapm_test_agent/web.py,sha256=
+ddapm_test_agent/vcr_proxy.py,sha256=zbenLsLVBStVJlBFypf0fFuZcDLMpXDT-GJ1ts-tCE0,16654
+ddapm_test_agent/web.py,sha256=D57xkEHuIMSi_ZVEIpxlYEbt_6cipAVhz1XneVWd8AU,59764
 ddapm_test_agent/static/style.css,sha256=0sUpN62bhc3cuW89G39hzNBmIPaYc8MPBt_PvSn0zf0,29560
 ddapm_test_agent/templates/base.html,sha256=Ie9ZvQXtv7YSR4SMFYUp5-eGs5sB_fxApU5pl1jUCCc,1010
 ddapm_test_agent/templates/config.html,sha256=vIgIG79Wo95p5x8RL0Esr9SZ86Ueejt9OBmW_uev1-o,12440
@@ -31,10 +31,10 @@ ddapm_test_agent/templates/snapshots.html,sha256=uWiYK9yDP9S88nxYNr0vLhYkWSzzj9C
 ddapm_test_agent/templates/trace_detail.html,sha256=EqC6l_jo2Afrf511_-pEDgz6-TEogTjtjawMVxzw7G4,1235
 ddapm_test_agent/templates/tracer_flares.html,sha256=wwq7Yaq7shXJineHT38A2tIHIEoirs5ryWu7raFco80,16272
 ddapm_test_agent/templates/traces.html,sha256=-RB_Nc3NzQQXApw5Y37KbmkU4Z41ehBCGouRhgojaRo,805
-ddapm_test_agent-1.37.0.dist-info/licenses/LICENSE.BSD3,sha256=J9S_Tq-hhvteDV2W8R0rqht5DZHkmvgdx3gnLZg4j6Q,1493
-ddapm_test_agent-1.37.0.dist-info/licenses/LICENSE.apache2,sha256=5V2RruBHZQIcPyceiv51DjjvdvhgsgS4pnXAOHDuZkQ,11342
-ddapm_test_agent-1.37.0.dist-info/METADATA,sha256=
-ddapm_test_agent-1.37.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ddapm_test_agent-1.37.0.dist-info/entry_points.txt,sha256=ulayVs6YJ-0Ej2kxbwn39wOHDVXbyQgFgsbRQmXydcs,250
-ddapm_test_agent-1.37.0.dist-info/top_level.txt,sha256=A9jiKOrrg6VjFAk-mtlSVYN4wr0VsZe58ehGR6IW47U,17
-ddapm_test_agent-1.37.0.dist-info/RECORD,,
+ddapm_test_agent-1.38.0.dist-info/licenses/LICENSE.BSD3,sha256=J9S_Tq-hhvteDV2W8R0rqht5DZHkmvgdx3gnLZg4j6Q,1493
+ddapm_test_agent-1.38.0.dist-info/licenses/LICENSE.apache2,sha256=5V2RruBHZQIcPyceiv51DjjvdvhgsgS4pnXAOHDuZkQ,11342
+ddapm_test_agent-1.38.0.dist-info/METADATA,sha256=HD007tJKa_D9iaGptUMvkjj8CN4zlH5_Xr78VYv0v5g,30466
+ddapm_test_agent-1.38.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ddapm_test_agent-1.38.0.dist-info/entry_points.txt,sha256=ulayVs6YJ-0Ej2kxbwn39wOHDVXbyQgFgsbRQmXydcs,250
+ddapm_test_agent-1.38.0.dist-info/top_level.txt,sha256=A9jiKOrrg6VjFAk-mtlSVYN4wr0VsZe58ehGR6IW47U,17
+ddapm_test_agent-1.38.0.dist-info/RECORD,,

{ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/WHEEL
RENAMED
File without changes

{ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/entry_points.txt
RENAMED
File without changes

{ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/licenses/LICENSE.BSD3
RENAMED
File without changes

{ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/licenses/LICENSE.apache2
RENAMED
File without changes

{ddapm_test_agent-1.37.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/top_level.txt
RENAMED
File without changes