ddapm-test-agent 1.34.0__py3-none-any.whl → 1.36.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddapm_test_agent/agent.py +399 -1
- ddapm_test_agent/trace_snapshot.py +29 -4
- ddapm_test_agent/vcr_proxy.py +27 -7
- {ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/METADATA +30 -2
- {ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/RECORD +10 -10
- {ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/WHEEL +0 -0
- {ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/entry_points.txt +0 -0
- {ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/licenses/LICENSE.BSD3 +0 -0
- {ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/licenses/LICENSE.apache2 +0 -0
- {ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/top_level.txt +0 -0
ddapm_test_agent/agent.py
CHANGED
@@ -9,10 +9,12 @@ from dataclasses import field
 import json
 import logging
 import os
+import platform
 import pprint
 import re
 import socket
 import sys
+import threading
 from typing import Any
 from typing import Awaitable
 from typing import Callable
@@ -240,6 +242,59 @@ def default_value_trace_results_summary():
     }


+class MockQuery:
+    """Mock query object that behaves like a dict."""
+
+    def __init__(self):
+        self._data = {}  # Empty query params for named pipe processing
+
+    def get(self, key, default=None):
+        return self._data.get(key, default)
+
+    def __getitem__(self, key):
+        return self._data[key]
+
+    def __contains__(self, key):
+        return key in self._data
+
+
+class MockURL:
+    """Mock URL object for named pipe processing."""
+
+    def __init__(self, path: str):
+        self.path = path
+        self.query = MockQuery()
+
+
+class MockRequest:
+    """Mock Request object for named pipe processing."""
+
+    def __init__(
+        self, method: str, path: str, headers: Dict[str, str], body: bytes, agent: "Agent", app: web.Application
+    ):
+        self.method = method
+        self.path = path
+        self.headers = headers
+        self._body = body
+        self._data: Dict[str, Any] = {}
+        self.url = MockURL(path)
+        self.content_type = headers.get("Content-Type", "application/msgpack")
+        self.app = app
+
+    async def read(self) -> bytes:
+        """Mock read() method that returns the body data."""
+        return self._body
+
+    def __getitem__(self, key):
+        return self._data.get(key)
+
+    def __setitem__(self, key, value):
+        self._data[key] = value
+
+    def get(self, key, default=None):
+        return self._data.get(key, default)
+
+
 @dataclass
 class _AgentSession:
     """Maintain Agent state across requests."""
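For illustration only (not part of the diff): a minimal sketch of how these mock objects duck-type the small slice of aiohttp's `Request` API the handlers rely on. The payload, header values, and the `None` agent/app arguments are placeholders.

```python
import asyncio

from ddapm_test_agent.agent import MockRequest


async def demo() -> None:
    # Build a fake PUT /v0.4/traces request the way _process_named_pipe_request would.
    req = MockRequest(
        "PUT",
        "/v0.4/traces",
        {"Content-Type": "application/msgpack", "X-Datadog-Test-Session-Token": "my-session"},
        b"\x90",  # empty msgpack traces payload (placeholder)
        None,  # agent: not used by the mock itself, illustrative only
        None,  # app: illustrative only
    )
    req["session_token"] = req.headers.get("X-Datadog-Test-Session-Token")
    assert await req.read() == b"\x90"
    print(req.method, req.url.path, req.content_type, req["session_token"])


asyncio.run(demo())
```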
@@ -274,6 +329,7 @@ class Agent:
         "/evp_proxy/v2/api/v2/llmobs",
         "/evp_proxy/v2/api/intake/llm-obs/v1/eval-metric",
         "/evp_proxy/v2/api/intake/llm-obs/v2/eval-metric",
+        "/evp_proxy/v4/api/v2/errorsintake",
     ]

     # Note that sessions are not cleared at any point since we don't know
@@ -746,6 +802,9 @@ class Agent:
     async def handle_evp_proxy_v2_llmobs_eval_metric(self, request: Request) -> web.Response:
         return web.HTTPOk()

+    async def handle_evp_proxy_v4_api_v2_errorsintake(self, request: Request) -> web.Response:
+        return web.HTTPOk()
+
     async def handle_put_tested_integrations(self, request: Request) -> web.Response:
         # we need to store the request manually since this is not a real DD agent endpoint
         await self._store_request(request)
@@ -823,6 +882,7 @@ class Agent:
                 "/v0.7/config",
                 "/tracer_flare/v1",
                 "/evp_proxy/v2/",
+                "/evp_proxy/v4/",
             ],
             "feature_flags": [],
             "config": {},
@@ -1069,6 +1129,7 @@ class Agent:
             self.handle_v1_tracer_flare,
             self.handle_evp_proxy_v2_api_v2_llmobs,
             self.handle_evp_proxy_v2_llmobs_eval_metric,
+            self.handle_evp_proxy_v4_api_v2_errorsintake,
             self.handle_v1_logs,
             self.handle_v1_metrics,
         ):
@@ -1287,6 +1348,175 @@ class Agent:
             raise web.HTTPBadRequest(body=msg)
         return response

+    def _parse_http_request(self, data: bytes) -> tuple[str, str, Dict[str, str], bytes]:
+        """Parse HTTP request from raw bytes.
+
+        Returns:
+            tuple: (method, path, headers_dict, body)
+        """
+        try:
+            # Split request into headers and body
+            if b"\r\n\r\n" in data:
+                header_data, body = data.split(b"\r\n\r\n", 1)
+            else:
+                header_data, body = data, b""
+
+            # Parse headers
+            header_lines = header_data.decode("utf-8", errors="ignore").split("\r\n")
+            if not header_lines:
+                raise ValueError("No request line found")
+
+            # Parse request line (e.g., "POST /v0.4/traces HTTP/1.1")
+            request_line = header_lines[0]
+            parts = request_line.split(" ")
+            if len(parts) < 2:
+                raise ValueError(f"Invalid request line: {request_line}")
+
+            method = parts[0]
+            path = parts[1]
+
+            # Parse headers
+            headers: Dict[str, str] = {}
+            for line in header_lines[1:]:
+                if ":" in line:
+                    key, value = line.split(":", 1)
+                    headers[key.strip()] = value.strip()
+
+            return method, path, headers, body
+
+        except Exception as e:
+            log.error(f"Error parsing HTTP request: {e}")
+            raise ValueError(f"Failed to parse HTTP request: {e}") from e
+
+    def _process_named_pipe_request(self, data: bytes, app: web.Application) -> bytes:
+        """Process a request using the existing Agent infrastructure."""
+        try:
+            # Parse the HTTP request
+            method, path, headers, body = self._parse_http_request(data)
+
+            log.info(f"Processing Named Pipe request: {method} {path}")
+
+            # Create a mock Request object
+            mock_request = MockRequest(method, path, headers, body, self, app)
+
+            # Extract session token like the middleware does
+            token = None
+            if "X-Datadog-Test-Session-Token" in headers:
+                token = headers["X-Datadog-Test-Session-Token"]
+            mock_request["session_token"] = token
+
+            # Store request data for agent processing
+            mock_request["_testagent_data"] = body
+
+            # Route to appropriate handler based on path using dictionary lookup
+            path_handlers = {
+                "/v0.4/traces": self.handle_v04_traces,
+                "/v0.5/traces": self.handle_v05_traces,
+                "/v0.7/traces": self.handle_v07_traces,
+                "/v1.0/traces": self.handle_v1_traces,
+                "/v0.6/stats": self.handle_v06_tracestats,
+                "/v0.1/pipeline_stats": self.handle_v01_pipelinestats,
+                "/v0.7/config": self.handle_v07_remoteconfig,
+                "/telemetry/proxy/api/v2/apmtelemetry": self.handle_v2_apmtelemetry,
+                "/profiling/v1/input": self.handle_v1_profiling,
+                "/tracer_flare/v1": self.handle_v1_tracer_flare,
+                "/evp_proxy/v2/api/v2/llmobs": self.handle_evp_proxy_v2_api_v2_llmobs,
+                "/evp_proxy/v2/api/intake/llm-obs/v1/eval-metric": self.handle_evp_proxy_v2_llmobs_eval_metric,
+                "/evp_proxy/v2/api/intake/llm-obs/v2/eval-metric": self.handle_evp_proxy_v2_llmobs_eval_metric,
+                "/evp_proxy/v4/api/v2/errorsintake": self.handle_evp_proxy_v4_api_v2_errorsintake,
+                "/info": self.handle_info,
+                # Test endpoints
+                "/test/session/start": self.handle_session_start,
+                "/test/session/clear": self.handle_session_clear,
+                "/test/session/snapshot": self.handle_snapshot,
+                "/test/session/traces": self.handle_session_traces,
+                "/test/session/apmtelemetry": self.handle_session_apmtelemetry,
+                "/test/session/tracerflares": self.handle_session_tracerflares,
+                "/test/session/stats": self.handle_session_tracestats,
+                "/test/session/requests": self.handle_session_requests,
+                "/test/session/responses/config": self.handle_v07_remoteconfig_create,
+                "/test/session/responses/config/path": self.handle_v07_remoteconfig_path_create,
+                "/test/traces": self.handle_test_traces,
+                "/test/apmtelemetry": self.handle_test_apmtelemetry,
+                "/test/trace/analyze": self.handle_trace_analyze,
+                "/test/trace_check/failures": self.get_trace_check_failures,
+                "/test/trace_check/clear": self.clear_trace_check_failures,
+                "/test/trace_check/summary": self.get_trace_check_summary,
+                "/test/integrations/tested_versions": self.handle_get_tested_integrations,
+                "/test/settings": self.handle_settings,
+            }
+
+            # Get handler from dictionary lookup
+            handler = path_handlers.get(path)
+            if not handler:
+                return self._create_error_response(404, "Not Found")
+
+            try:
+                # Create a new event loop for this thread if one doesn't exist
+                loop = asyncio.get_event_loop()
+            except RuntimeError:
+                loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(loop)
+
+            # Initialize the CheckTrace context like middleware does
+            start_trace("named_pipe_request %s %s" % (method, path))
+
+            # Run the handler
+            response = loop.run_until_complete(handler(mock_request))  # type: ignore[arg-type]
+
+            # Convert aiohttp response to HTTP bytes
+            return self._convert_response_to_http(response)
+
+        except Exception as e:
+            log.error(f"Error processing Named Pipe request: {e}", exc_info=True)
+            return self._create_error_response(500, "Internal Server Error")
+
+    def _convert_response_to_http(self, response: web.Response) -> bytes:
+        """Convert aiohttp Response to HTTP response bytes."""
+        try:
+            # Build HTTP response
+            status_line = f"HTTP/1.1 {response.status} {response.reason}\r\n"
+
+            # Build headers
+            headers_lines = []
+            for key, value in response.headers.items():
+                headers_lines.append(f"{key}: {value}\r\n")
+
+            # Get response body
+            body_data: bytes
+            if hasattr(response, "body") and response.body:
+                if isinstance(response.body, bytes):
+                    body_data = response.body
+                elif isinstance(response.body, str):
+                    body_data = response.body.encode()
+                else:
+                    # Handle Payload or other types by converting to string first
+                    body_data = str(response.body).encode()
+            else:
+                body_data = b""
+
+            # Add Content-Length header if not present
+            if "Content-Length" not in response.headers:
+                headers_lines.append(f"Content-Length: {len(body_data)}\r\n")
+
+            # Combine all parts
+            headers_str = "".join(headers_lines)
+            http_response = status_line + headers_str + "\r\n"
+            return http_response.encode("utf-8") + body_data
+
+        except Exception as e:
+            log.error(f"Error converting response to HTTP: {e}")
+            return self._create_error_response(500, "Internal Server Error")
+
+    def _create_error_response(self, status_code: int, reason: str) -> bytes:
+        """Create an HTTP error response."""
+        body = f"{status_code} {reason}".encode("utf-8")
+        response = f"HTTP/1.1 {status_code} {reason}\r\n"
+        response += f"Content-Length: {len(body)}\r\n"
+        response += "Content-Type: text/plain\r\n"
+        response += "\r\n"
+        return response.encode("utf-8") + body
+

 def make_otlp_http_app(agent: Agent) -> web.Application:
     """Create a separate HTTP application for OTLP endpoints using the shared agent instance."""
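For illustration only (not part of the diff): the named-pipe path expects a plain HTTP/1.1 request serialized as bytes. A hedged sketch of such a payload and of the tuple `_parse_http_request()` returns for it; the header values and payload are made up.

```python
from ddapm_test_agent.agent import Agent

payload = b"\x90"  # empty msgpack traces payload (placeholder)
request_bytes = (
    b"PUT /v0.4/traces HTTP/1.1\r\n"
    b"Content-Type: application/msgpack\r\n"
    b"X-Datadog-Test-Session-Token: my-session\r\n"
    + f"Content-Length: {len(payload)}\r\n".encode()
    + b"\r\n"
    + payload
)

# The request line, header block, and body are split on the blank line.
method, path, headers, body = Agent()._parse_http_request(request_bytes)
assert (method, path, body) == ("PUT", "/v0.4/traces", payload)
assert headers["Content-Type"] == "application/msgpack"
```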
@@ -1356,6 +1586,8 @@ def make_app(
     snapshot_regex_placeholders: Dict[str, str],
     vcr_cassettes_directory: str,
     vcr_ci_mode: bool,
+    vcr_provider_map: str,
+    vcr_ignore_headers: str,
 ) -> web.Application:
     agent = Agent()
     app = web.Application(
@@ -1390,6 +1622,7 @@ def make_app(
             web.post("/evp_proxy/v2/api/v2/llmobs", agent.handle_evp_proxy_v2_api_v2_llmobs),
             web.post("/evp_proxy/v2/api/intake/llm-obs/v1/eval-metric", agent.handle_evp_proxy_v2_llmobs_eval_metric),
             web.post("/evp_proxy/v2/api/intake/llm-obs/v2/eval-metric", agent.handle_evp_proxy_v2_llmobs_eval_metric),
+            web.post("/evp_proxy/v4/api/v2/errorsintake", agent.handle_evp_proxy_v4_api_v2_errorsintake),
             web.get("/info", agent.handle_info),
             web.get("/test/session/start", agent.handle_session_start),
             web.get("/test/session/clear", agent.handle_session_clear),
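For illustration only (not part of the diff): a quick check, assuming a test agent is already listening on the default 127.0.0.1:9126, that the newly registered errors-intake EVP proxy route answers 200 OK.

```python
import requests

# The handler simply returns web.HTTPOk(), so any payload is accepted.
resp = requests.post("http://127.0.0.1:9126/evp_proxy/v4/api/v2/errorsintake", data=b"{}")
assert resp.status_code == 200
```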
@@ -1417,7 +1650,9 @@ def make_app(
             web.route(
                 "*",
                 "/vcr/{path:.*}",
-                lambda request: proxy_request(
+                lambda request: proxy_request(
+                    request, vcr_cassettes_directory, vcr_ci_mode, vcr_provider_map, vcr_ignore_headers
+                ),
             ),
         ]
     )
@@ -1449,6 +1684,137 @@ def make_app(
     return app


+def _start_named_pipe_server(pipe_path: str, agent: "Agent", app: web.Application) -> None:
+    """Start Windows named pipe server."""
+    if platform.system() != "Windows":
+        log.warning("Named pipes are only supported on Windows, ignoring --trace-named-pipe")
+        return
+
+    # Import Windows-specific modules here to avoid import errors on other platforms
+    try:
+        import win32file
+        import win32pipe
+    except ImportError as e:
+        log.error(f"Failed to import Windows modules for named pipes: {e}")
+        return
+
+    _start_windows_named_pipe_server(pipe_path, agent, app, win32pipe, win32file)
+
+
+def _create_and_wait_for_client(
+    pipe_path: str, agent: "Agent", app: web.Application, win32pipe: Any, win32file: Any
+) -> None:
+    """Create a single pipe instance and wait for a client connection."""
+    while True:
+        try:
+            # Create named pipe instance
+            pipe_handle = win32pipe.CreateNamedPipe(
+                pipe_path,
+                win32pipe.PIPE_ACCESS_DUPLEX,
+                win32pipe.PIPE_TYPE_MESSAGE | win32pipe.PIPE_READMODE_MESSAGE | win32pipe.PIPE_WAIT,
+                win32pipe.PIPE_UNLIMITED_INSTANCES,  # allow multiple concurrent connections
+                65536,  # output buffer size
+                65536,  # input buffer size
+                0,  # default timeout
+                None,  # security attributes
+            )
+
+            if pipe_handle == win32file.INVALID_HANDLE_VALUE:
+                log.error("Failed to create named pipe instance")
+                import time
+
+                time.sleep(1)  # Wait before retrying
+                continue
+
+            log.debug("Named pipe instance created, waiting for client...")
+
+            # Wait for client connection
+            win32pipe.ConnectNamedPipe(pipe_handle, None)
+            log.info("Client connected to named pipe instance")
+
+            # Handle the client request
+            _handle_windows_named_pipe_client(pipe_handle, agent, app, win32pipe, win32file)
+
+        except Exception as e:
+            log.error(f"Error in named pipe instance: {e}")
+            import time
+
+            time.sleep(1)  # Wait before retrying
+
+
+def _start_windows_named_pipe_server(
+    pipe_path: str, agent: "Agent", app: web.Application, win32pipe: Any, win32file: Any
+) -> None:
+    """Start a Windows named pipe server with multiple instances."""
+    if win32pipe is None:
+        log.error("Windows named pipe support not available (pywin32 not installed)")
+        return
+
+    log.info(f"Starting Windows named pipe server on: {pipe_path}")
+
+    # Create multiple pipe instances for better concurrency
+    num_instances = 10  # Support up to 10 concurrent connections
+    threads = []
+
+    for _ in range(num_instances):
+        thread = threading.Thread(
+            target=_create_and_wait_for_client, args=(pipe_path, agent, app, win32pipe, win32file), daemon=True
+        )
+        thread.start()
+        threads.append(thread)
+
+    log.info(f"Started {num_instances} named pipe instances")
+
+    # Keep the main thread alive and monitor instance threads
+    try:
+        while True:
+            import time
+
+            time.sleep(5)
+
+            # Check if any threads have died and restart them
+            for i, thread in enumerate(threads):
+                if not thread.is_alive():
+                    log.warning(f"Restarting named pipe instance {i}")
+                    new_thread = threading.Thread(
+                        target=_create_and_wait_for_client,
+                        args=(pipe_path, agent, app, win32pipe, win32file),
+                        daemon=True,
+                    )
+                    new_thread.start()
+                    threads[i] = new_thread
+
+    except KeyboardInterrupt:
+        log.info("Named pipe server shutting down")
+
+
+def _handle_windows_named_pipe_client(
+    pipe_handle: Any, agent: "Agent", app: web.Application, win32pipe: Any, win32file: Any
+) -> None:
+    """Handle a Windows named pipe client connection."""
+    try:
+        # Read request data
+        result, data = win32file.ReadFile(pipe_handle, 65536)
+        if result == 0:  # SUCCESS
+            log.info(f"Received {len(data)} bytes from named pipe client")
+
+            # Process request
+            response = agent._process_named_pipe_request(data, app)
+
+            # Write response
+            win32file.WriteFile(pipe_handle, response)
+            log.info(f"Sent {len(response)} bytes response to named pipe client")
+
+    except Exception as e:
+        log.error(f"Error handling Windows named pipe client: {e}")
+    finally:
+        try:
+            win32pipe.DisconnectNamedPipe(pipe_handle)
+            win32file.CloseHandle(pipe_handle)
+        except Exception:
+            pass
+
+
 def main(args: Optional[List[str]] = None) -> None:
     if args is None:
         args = sys.argv[1:]
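For illustration only (not part of the diff): a rough, Windows-only sketch of a client talking to the pipe server above. It assumes the agent was started with a pipe path of `\\.\pipe\dd-test-agent` (an illustrative name) and uses plain file I/O on the pipe path for brevity; a production client might prefer pywin32's `CreateFile`.

```python
PIPE_PATH = r"\\.\pipe\dd-test-agent"  # illustrative pipe name

# Any path handled by _process_named_pipe_request works; /info is the simplest.
request_bytes = b"GET /info HTTP/1.1\r\n\r\n"

with open(PIPE_PATH, "r+b", buffering=0) as pipe:
    pipe.write(request_bytes)
    print(pipe.read(65536))  # raw HTTP/1.1 response bytes built by _convert_response_to_http
```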
@@ -1550,6 +1916,12 @@ def main(args: Optional[List[str]] = None) -> None:
         default=os.environ.get("DD_APM_RECEIVER_SOCKET", None),
         help=("Will listen for traces on the specified socket path"),
     )
+    parser.add_argument(
+        "--trace-named-pipe",
+        type=str,
+        default=os.environ.get("DD_APM_RECEIVER_NAMED_PIPE", None),
+        help=("Will listen for traces on the specified named pipe path"),
+    )
     parser.add_argument(
         "--trace-request-delay",
         type=float,
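For illustration only (not part of the diff): starting the agent in-process with the new flag. The pipe name is illustrative, on non-Windows platforms the flag is ignored with a warning, and `main()` blocks while the servers run.

```python
from ddapm_test_agent.agent import main

# Roughly equivalent to setting DD_APM_RECEIVER_NAMED_PIPE and running the console script.
main(["--trace-named-pipe", r"\\.\pipe\dd-test-agent"])
```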
@@ -1588,6 +1960,18 @@ def main(args: Optional[List[str]] = None) -> None:
         default=os.environ.get("VCR_CI_MODE", False),
         help="Will change the test agent to record VCR cassettes in CI mode, throwing an error if a cassette is not found on /vcr/{provider}",
     )
+    parser.add_argument(
+        "--vcr-provider-map",
+        type=str,
+        default=os.environ.get("VCR_PROVIDER_MAP", ""),
+        help="Comma-separated list of provider=base_url tuples to map providers to paths. Used in addition to the default provider paths.",
+    )
+    parser.add_argument(
+        "--vcr-ignore-headers",
+        type=str,
+        default=os.environ.get("VCR_IGNORE_HEADERS", ""),
+        help="Comma-separated list of headers to ignore when recording VCR cassettes.",
+    )
     parsed_args = parser.parse_args(args=args)
     logging.basicConfig(level=parsed_args.log_level)

@@ -1632,6 +2016,8 @@ def main(args: Optional[List[str]] = None) -> None:
         snapshot_regex_placeholders=parsed_args.snapshot_regex_placeholders,
         vcr_cassettes_directory=parsed_args.vcr_cassettes_directory,
         vcr_ci_mode=parsed_args.vcr_ci_mode,
+        vcr_provider_map=parsed_args.vcr_provider_map,
+        vcr_ignore_headers=parsed_args.vcr_ignore_headers,
     )

     # Validate port configuration
@@ -1644,6 +2030,18 @@ def main(args: Optional[List[str]] = None) -> None:

     # Get the shared agent instance from the main app
     agent = app["agent"]
+
+    # Named pipe setup (after agent is available)
+    named_pipe_thread = None
+    if parsed_args.trace_named_pipe is not None:
+
+        def start_named_pipe_server():
+            _start_named_pipe_server(parsed_args.trace_named_pipe, agent, app)
+
+        named_pipe_thread = threading.Thread(target=start_named_pipe_server, daemon=True)
+        named_pipe_thread.start()
+        log.info(f"Started named pipe server on: {parsed_args.trace_named_pipe}")
+
     otlp_http_app = make_otlp_http_app(agent)

     async def run_servers():

ddapm_test_agent/trace_snapshot.py
CHANGED
@@ -34,6 +34,27 @@ log = logging.getLogger(__name__)
 DEFAULT_SNAPSHOT_IGNORES = "span_id,trace_id,parent_id,duration,start,metrics.system.pid,metrics.system.process_id,metrics.process_id,metrics._dd.tracer_kr,meta.runtime-id,span_links.trace_id_high,span_events.time_unix_nano,meta.pathway.hash,meta._dd.p.tid"


+def _normalize_span_for_comparison(span: Span) -> Span:
+    """Normalize span for cross-platform comparison (Windows vs Unix).
+
+    Handles differences in how ddtrace creates spans on different platforms:
+    - Windows may omit fields with default values (error, type)
+    - Windows may use None instead of empty string for service
+    """
+    normalized = dict(span)
+
+    if "error" not in normalized:
+        normalized["error"] = 0
+
+    if "type" not in normalized:
+        normalized["type"] = ""
+
+    if normalized.get("service") is None:
+        normalized["service"] = ""
+
+    return cast(Span, normalized)
+
+
 def _key_match(d1: Dict[str, Any], d2: Dict[str, Any], key: str) -> bool:
     """
     >>> _key_match({"a": 1}, {"a": 2}, "a")
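For illustration only (not part of the diff): what the new normalization does to a span as a Windows tracer might emit it. A reduced span dict is used here; real spans carry many more fields.

```python
from ddapm_test_agent.trace_snapshot import _normalize_span_for_comparison

windows_style_span = {"name": "web.request", "service": None}  # no error/type, service is None
normalized = _normalize_span_for_comparison(windows_style_span)
print(normalized)  # {'name': 'web.request', 'service': '', 'error': 0, 'type': ''}
```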
@@ -347,6 +368,10 @@ def _compare_traces(expected: Trace, received: Trace, ignored: Set[str]) -> None
     )

     for s_exp, s_rec in zip(expected, received):
+        # Normalize spans for cross-platform comparison (Windows vs Unix)
+        s_exp_norm = _normalize_span_for_comparison(s_exp)
+        s_rec_norm = _normalize_span_for_comparison(s_rec)
+
         with CheckTrace.add_frame(
             f"snapshot compare of span '{s_exp['name']}' at position {s_exp['span_id']} in trace"
         ) as frame:
@@ -358,12 +383,12 @@ def _compare_traces(expected: Trace, received: Trace, ignored: Set[str]) -> None
                 metrics_diffs,
                 span_link_diffs,
                 span_event_diffs,
-            ) = _diff_spans(
+            ) = _diff_spans(s_exp_norm, s_rec_norm, ignored)

             for diffs, diff_type, d_exp, d_rec in [
-                (top_level_diffs, "span",
-                (meta_diffs, "meta",
-                (metrics_diffs, "metrics",
+                (top_level_diffs, "span", s_exp_norm, s_rec_norm),
+                (meta_diffs, "meta", s_exp_norm["meta"], s_rec_norm["meta"]),
+                (metrics_diffs, "metrics", s_exp_norm["metrics"], s_rec_norm["metrics"]),
             ]:
                 for diff_key in diffs:
                     if diff_key not in d_exp:
ddapm_test_agent/vcr_proxy.py
CHANGED
@@ -1,3 +1,4 @@
+import asyncio
 import hashlib
 import json
 import logging
@@ -75,6 +76,16 @@ def _file_safe_string(s: str) -> str:
     return "".join(c if c.isalnum() or c in ".-" else "_" for c in s)


+def get_custom_vcr_providers(vcr_provider_map: str) -> Dict[str, str]:
+    return dict(
+        [
+            vcr_provider_map.strip().split("=", 1)
+            for vcr_provider_map in vcr_provider_map.split(",")
+            if vcr_provider_map.strip()
+        ]
+    )
+
+
 def normalize_multipart_body(body: bytes) -> str:
     if not body:
         return ""
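For illustration only (not part of the diff): the provider-map string from the README example, parsed by the new helper into the mapping that later gets merged over the default provider base URLs.

```python
from ddapm_test_agent.vcr_proxy import get_custom_vcr_providers

mapping = get_custom_vcr_providers("provider1=http://provider1.com/,provider2=http://provider2.com/")
print(mapping)  # {'provider1': 'http://provider1.com/', 'provider2': 'http://provider2.com/'}
```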
@@ -114,14 +125,15 @@ def parse_authorization_header(auth_header: str) -> Dict[str, str]:
     return parsed


-def get_vcr(subdirectory: str, vcr_cassettes_directory: str) -> vcr.VCR:
+def get_vcr(subdirectory: str, vcr_cassettes_directory: str, vcr_ignore_headers: str) -> vcr.VCR:
     cassette_dir = os.path.join(vcr_cassettes_directory, subdirectory)
+    extra_ignore_headers = vcr_ignore_headers.split(",")

     return vcr.VCR(
         cassette_library_dir=cassette_dir,
         record_mode="once",
         match_on=["path", "method"],
-        filter_headers=CASSETTE_FILTER_HEADERS,
+        filter_headers=CASSETTE_FILTER_HEADERS + extra_ignore_headers,
     )

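For illustration only (not part of the diff): a small sketch of how the new `vcr_ignore_headers` string is applied; the cassette directory and header names here are placeholders. The comma-separated value is split and appended to the default header filter list.

```python
from ddapm_test_agent.vcr_proxy import get_vcr

# "openai" is one of the default provider subdirectories; the ignore list is illustrative.
recorder = get_vcr("openai", "/vcr-cassettes", "x-api-key,user-agent")
print(recorder.cassette_library_dir)  # /vcr-cassettes/openai
```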
@@ -146,7 +158,12 @@ def generate_cassette_name(path: str, method: str, body: bytes, vcr_cassette_pre
     )


-async def proxy_request(
+async def proxy_request(
+    request: Request, vcr_cassettes_directory: str, vcr_ci_mode: bool, vcr_provider_map: str, vcr_ignore_headers: str
+) -> Response:
+    provider_base_urls = PROVIDER_BASE_URLS.copy()
+    provider_base_urls.update(get_custom_vcr_providers(vcr_provider_map))
+
     path = request.match_info["path"]
     if request.query_string:
         path = path + "?" + request.query_string
@@ -156,7 +173,7 @@ async def proxy_request(request: Request, vcr_cassettes_directory: str, vcr_ci_m
         return Response(body="Invalid path format. Expected /{provider}/...", status=400)

     provider, remaining_path = parts
-    if provider not in
+    if provider not in provider_base_urls:
         return Response(body=f"Unsupported provider: {provider}", status=400)

     body_bytes = await request.read()
@@ -173,7 +190,7 @@ async def proxy_request(request: Request, vcr_cassettes_directory: str, vcr_ci_m
             status=500,
         )

-    target_url = url_path_join(
+    target_url = url_path_join(provider_base_urls[provider], remaining_path)
     headers = {key: value for key, value in request.headers.items() if key != "Host"}

     request_kwargs: Dict[str, Any] = {
@@ -200,8 +217,11 @@ async def proxy_request(request: Request, vcr_cassettes_directory: str, vcr_ci_m
         auth = AWS4Auth(aws_access_key, AWS_SECRET_ACCESS_KEY, AWS_REGION, AWS_SERVICES[provider])
         request_kwargs["auth"] = auth

-
-
+    def _make_request():
+        with get_vcr(provider, vcr_cassettes_directory, vcr_ignore_headers).use_cassette(cassette_file_name):
+            return requests.request(**request_kwargs)
+
+    provider_response = await asyncio.to_thread(_make_request)

     # Extract content type without charset
     content_type = provider_response.headers.get("content-type", "")
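For illustration only (not part of the diff): the design change above is that the blocking `requests` + VCR call now runs on a worker thread via `asyncio.to_thread`, so the aiohttp event loop stays responsive while a cassette is recorded or replayed. The same pattern in isolation; the URL is illustrative.

```python
import asyncio

import requests


async def fetch_info() -> int:
    # Off-load the blocking HTTP call to a thread; the event loop keeps running.
    resp = await asyncio.to_thread(requests.get, "http://127.0.0.1:9126/info")
    return resp.status_code


print(asyncio.run(fetch_info()))
```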

{ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddapm-test-agent
-Version: 1.34.0
+Version: 1.36.0
 Summary: Test agent for Datadog APM client libraries
 Home-page: https://github.com/Datadog/dd-apm-test-agent
 Author: Kyle Verhoog
@@ -25,10 +25,12 @@ Requires-Dist: requests-aws4auth
 Requires-Dist: opentelemetry-proto<1.37.0,>1.33.0
 Requires-Dist: protobuf>=3.19.0
 Requires-Dist: grpcio<2.0,>=1.66.2
+Requires-Dist: pywin32; sys_platform == "win32"
 Provides-Extra: testing
 Requires-Dist: ddtrace==3.11.0; extra == "testing"
 Requires-Dist: pytest; extra == "testing"
 Requires-Dist: riot==0.20.1; extra == "testing"
+Requires-Dist: PyYAML==6.0.3; extra == "testing"
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
@@ -183,7 +185,33 @@ The cassettes are matched based on the path, method, and body of the request. To
     -v $PWD/vcr-cassettes:/vcr-cassettes
     ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:latest

-Optionally specifying whatever mounted path is used for the cassettes directory. The test agent comes with a default set of cassettes for OpenAI, Azure OpenAI, and
+Optionally specifying whatever mounted path is used for the cassettes directory. The test agent comes with a default set of cassettes for OpenAI, Azure OpenAI, DeepSeek, Anthropic, Google GenAI, and AWS Bedrock Runtime.
+
+#### Custom 3rd Party Providers
+
+The test agent can be configured to also register custom 3rd party providers. This is done by setting the `VCR_PROVIDER_MAP` environment variable or the `--vcr-provider-map` command-line option to a comma-separated list of provider names and their corresponding base URLs.
+
+```shell
+VCR_PROVIDER_MAP="provider1=http://provider1.com/,provider2=http://provider2.com/"
+```
+
+or
+
+```shell
+--vcr-provider-map="provider1=http://provider1.com/,provider2=http://provider2.com/"
+```
+
+The provider names are used to match the provider name in the request path, and the base URLs are used to proxy the request to the corresponding provider API endpoint.
+
+With this configuration set, you can make the following request to the test agent without error:
+
+```shell
+curl -X POST 'http://127.0.0.1:9126/vcr/provider1/some/path'
+```
+
+#### Ignoring Headers in Recorded Cassettes
+
+To ignore headers in recorded cassettes, you can use the `--vcr-ignore-headers` flag or `VCR_IGNORE_HEADERS` environment variable. The list should take the form of `header1,header2,header3`, and will be omitted from the recorded cassettes.

 #### AWS Services
 AWS service proxying, specifically recording cassettes for the first time, requires a `AWS_SECRET_ACCESS_KEY` environment variable to be set for the container running the test agent. This is used to recalculate the AWS signature for the request, as the one generated client-side likely used `{test-agent-host}:{test-agent-port}/vcr/{aws-service}` as the host, and the signature will mismatch that on the actual AWS service.

{ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 ddapm_test_agent/__init__.py,sha256=IEYMDM-xI0IoHYSYw4Eva5263puB_crrrbLstOCScRw,106
-ddapm_test_agent/agent.py,sha256=
+ddapm_test_agent/agent.py,sha256=PG22ZeuGiQoEoC7YN3ffedAbXXYwYnnCMbxBT0_pHdk,90104
 ddapm_test_agent/apmtelemetry.py,sha256=w_9-yUDh1dgox-FfLqeOHU2C14GcjOjen-_SVagiZrc,861
 ddapm_test_agent/checks.py,sha256=pBa4YKZQVA8qaTVJ_XgMA6TmlUZNh99YOrCFJA7fwo0,6865
 ddapm_test_agent/client.py,sha256=ViEmiRX9Y3SQ-KBhSc-FdzBmIVIe8Ij9jj-Q6VGyzLY,7359
@@ -12,15 +12,15 @@ ddapm_test_agent/metrics.py,sha256=EZo7lSec2oAiH7tUqavKZ2MJM7TwbuFGE3AT3cXwmSM,3
 ddapm_test_agent/remoteconfig.py,sha256=_QjYUKc3JF31DxdvISDXgslm5WVnYWAw0hyckWuLc1c,3606
 ddapm_test_agent/trace.py,sha256=t0OR8w3NcZK-EOOoadgPITiZqS5tAJGtxqLVGLEw7Kg,45816
 ddapm_test_agent/trace_checks.py,sha256=bRg2eLKoHROXIFJRbujMUn0T3x1X8pZso-j8wXNomec,9972
-ddapm_test_agent/trace_snapshot.py,sha256=
+ddapm_test_agent/trace_snapshot.py,sha256=vcz9uCgtpnInKl32nq1n62shhsVdMQPzOWfV3-RjTVM,23781
 ddapm_test_agent/tracerflare.py,sha256=uoSjhPCOKZflgJn5JLv1Unh4gUdAR1-YbC9_1n1iH9w,954
 ddapm_test_agent/tracestats.py,sha256=q_WQZnh2kXSSN3fRIBe_0jMYCBQHcaS3fZmJTge4lWc,2073
 ddapm_test_agent/tracestats_snapshot.py,sha256=VsB6MVnHPjPWHVWnnDdCXJcVKL_izKXEf9lvJ0qbjNQ,3609
-ddapm_test_agent/vcr_proxy.py,sha256=
-ddapm_test_agent-1.
-ddapm_test_agent-1.
-ddapm_test_agent-1.
-ddapm_test_agent-1.
-ddapm_test_agent-1.
-ddapm_test_agent-1.
-ddapm_test_agent-1.
+ddapm_test_agent/vcr_proxy.py,sha256=dEJ5xXxxvFHAbvN-OgXEaOlqz54Vh1k_1eQBgf146lU,7812
+ddapm_test_agent-1.36.0.dist-info/licenses/LICENSE.BSD3,sha256=J9S_Tq-hhvteDV2W8R0rqht5DZHkmvgdx3gnLZg4j6Q,1493
+ddapm_test_agent-1.36.0.dist-info/licenses/LICENSE.apache2,sha256=5V2RruBHZQIcPyceiv51DjjvdvhgsgS4pnXAOHDuZkQ,11342
+ddapm_test_agent-1.36.0.dist-info/METADATA,sha256=e9jH-GwRcURPTcuyTDkyXsrkIAGb4cNepAN8-pOCywg,29678
+ddapm_test_agent-1.36.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ddapm_test_agent-1.36.0.dist-info/entry_points.txt,sha256=ulayVs6YJ-0Ej2kxbwn39wOHDVXbyQgFgsbRQmXydcs,250
+ddapm_test_agent-1.36.0.dist-info/top_level.txt,sha256=A9jiKOrrg6VjFAk-mtlSVYN4wr0VsZe58ehGR6IW47U,17
+ddapm_test_agent-1.36.0.dist-info/RECORD,,

{ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/WHEEL
RENAMED
File without changes

{ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/entry_points.txt
RENAMED
File without changes

{ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/licenses/LICENSE.BSD3
RENAMED
File without changes

{ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/licenses/LICENSE.apache2
RENAMED
File without changes

{ddapm_test_agent-1.34.0.dist-info → ddapm_test_agent-1.36.0.dist-info}/top_level.txt
RENAMED
File without changes