ddapm-test-agent 1.31.0__py3-none-any.whl → 1.32.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddapm_test_agent/agent.py +240 -3
- ddapm_test_agent/client.py +63 -20
- ddapm_test_agent/logs.py +67 -0
- ddapm_test_agent/metrics.py +94 -0
- ddapm_test_agent/trace.py +399 -0
- {ddapm_test_agent-1.31.0.dist-info → ddapm_test_agent-1.32.0.dist-info}/METADATA +55 -2
- {ddapm_test_agent-1.31.0.dist-info → ddapm_test_agent-1.32.0.dist-info}/RECORD +12 -10
- {ddapm_test_agent-1.31.0.dist-info → ddapm_test_agent-1.32.0.dist-info}/WHEEL +0 -0
- {ddapm_test_agent-1.31.0.dist-info → ddapm_test_agent-1.32.0.dist-info}/entry_points.txt +0 -0
- {ddapm_test_agent-1.31.0.dist-info → ddapm_test_agent-1.32.0.dist-info}/licenses/LICENSE.BSD3 +0 -0
- {ddapm_test_agent-1.31.0.dist-info → ddapm_test_agent-1.32.0.dist-info}/licenses/LICENSE.apache2 +0 -0
- {ddapm_test_agent-1.31.0.dist-info → ddapm_test_agent-1.32.0.dist-info}/top_level.txt +0 -0
ddapm_test_agent/agent.py
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import argparse
|
|
2
|
+
import asyncio
|
|
2
3
|
import atexit
|
|
3
4
|
import base64
|
|
4
5
|
from collections import OrderedDict
|
|
@@ -12,6 +13,7 @@ import pprint
|
|
|
12
13
|
import re
|
|
13
14
|
import socket
|
|
14
15
|
import sys
|
|
16
|
+
from typing import Any
|
|
15
17
|
from typing import Awaitable
|
|
16
18
|
from typing import Callable
|
|
17
19
|
from typing import DefaultDict
|
|
@@ -32,8 +34,11 @@ from aiohttp import web
|
|
|
32
34
|
from aiohttp.web import HTTPException
|
|
33
35
|
from aiohttp.web import Request
|
|
34
36
|
from aiohttp.web import middleware
|
|
37
|
+
from grpc import aio as grpc_aio
|
|
35
38
|
from msgpack.exceptions import ExtraData as MsgPackExtraDataException
|
|
36
39
|
from multidict import CIMultiDict
|
|
40
|
+
from opentelemetry.proto.collector.logs.v1.logs_service_pb2_grpc import add_LogsServiceServicer_to_server
|
|
41
|
+
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2_grpc import add_MetricsServiceServicer_to_server
|
|
37
42
|
|
|
38
43
|
from . import _get_version
|
|
39
44
|
from . import trace_snapshot
|
|
@@ -44,10 +49,17 @@ from .checks import CheckTrace
|
|
|
44
49
|
from .checks import Checks
|
|
45
50
|
from .checks import start_trace
|
|
46
51
|
from .integration import Integration
|
|
52
|
+
from .logs import LOGS_ENDPOINT
|
|
53
|
+
from .logs import OTLPLogsGRPCServicer
|
|
54
|
+
from .logs import decode_logs_request
|
|
55
|
+
from .metrics import METRICS_ENDPOINT
|
|
56
|
+
from .metrics import OTLPMetricsGRPCServicer
|
|
57
|
+
from .metrics import decode_metrics_request
|
|
47
58
|
from .remoteconfig import RemoteConfigServer
|
|
48
59
|
from .trace import Span
|
|
49
60
|
from .trace import Trace
|
|
50
61
|
from .trace import TraceMap
|
|
62
|
+
from .trace import decode_v1 as trace_decode_v1
|
|
51
63
|
from .trace import decode_v04 as trace_decode_v04
|
|
52
64
|
from .trace import decode_v05 as trace_decode_v05
|
|
53
65
|
from .trace import decode_v07 as trace_decode_v07
|
|
@@ -66,6 +78,12 @@ from .tracestats import v06StatsPayload
|
|
|
66
78
|
from .vcr_proxy import proxy_request
|
|
67
79
|
|
|
68
80
|
|
|
81
|
+
# Default ports
|
|
82
|
+
DEFAULT_APM_PORT = 8126
|
|
83
|
+
DEFAULT_OTLP_HTTP_PORT = 4318
|
|
84
|
+
DEFAULT_OTLP_GRPC_PORT = 4317
|
|
85
|
+
|
|
86
|
+
|
|
69
87
|
class NoSuchSessionException(Exception):
|
|
70
88
|
pass
|
|
71
89
|
|
|
@@ -118,7 +136,7 @@ async def _vcr_proxy_cassette_prefix(request: Request) -> Optional[str]:
|
|
|
118
136
|
request_body: dict[str, str] = await request.json()
|
|
119
137
|
requested_test_name = request_body.get("test_name")
|
|
120
138
|
return requested_test_name
|
|
121
|
-
except json.JSONDecodeError:
|
|
139
|
+
except (json.JSONDecodeError, UnicodeDecodeError):
|
|
122
140
|
return None
|
|
123
141
|
|
|
124
142
|
|
|
@@ -247,6 +265,7 @@ class Agent:
|
|
|
247
265
|
"/v0.4/traces",
|
|
248
266
|
"/v0.5/traces",
|
|
249
267
|
"/v0.7/traces",
|
|
268
|
+
"/v1.0/traces",
|
|
250
269
|
"/v0.6/stats",
|
|
251
270
|
"/v0.7/config",
|
|
252
271
|
"/telemetry/proxy/api/v2/apmtelemetry",
|
|
@@ -428,6 +447,8 @@ class Agent:
|
|
|
428
447
|
return self._decode_v05_traces(req)
|
|
429
448
|
elif req.match_info.handler == self.handle_v07_traces:
|
|
430
449
|
return self._decode_v07_traces(req)
|
|
450
|
+
elif req.match_info.handler == self.handle_v1_traces:
|
|
451
|
+
return self._decode_v1_traces(req)
|
|
431
452
|
return []
|
|
432
453
|
|
|
433
454
|
async def _traces_by_session(self, token: Optional[str]) -> List[Trace]:
|
|
@@ -486,6 +507,34 @@ class Agent:
|
|
|
486
507
|
stats.append(s)
|
|
487
508
|
return stats
|
|
488
509
|
|
|
510
|
+
async def _logs_by_session(self, token: Optional[str]) -> List[Dict[str, Any]]:
|
|
511
|
+
"""Return the logs that belong to the given session token.
|
|
512
|
+
|
|
513
|
+
If token is None or if the token was used to manually start a session
|
|
514
|
+
with /session-start then return all logs that were sent since the last
|
|
515
|
+
/session-start request was made.
|
|
516
|
+
"""
|
|
517
|
+
logs: List[Dict[str, Any]] = []
|
|
518
|
+
for req in self._requests_by_session(token):
|
|
519
|
+
if req.match_info.handler == self.handle_v1_logs:
|
|
520
|
+
logs_data = self._decode_v1_logs(req)
|
|
521
|
+
logs.append(logs_data)
|
|
522
|
+
return logs
|
|
523
|
+
|
|
524
|
+
async def _metrics_by_session(self, token: Optional[str]) -> List[Dict[str, Any]]:
|
|
525
|
+
"""Return the metrics that belong to the given session token.
|
|
526
|
+
|
|
527
|
+
If token is None or if the token was used to manually start a session
|
|
528
|
+
with /session-start then return all metrics that were sent since the last
|
|
529
|
+
/session-start request was made.
|
|
530
|
+
"""
|
|
531
|
+
metrics: List[Dict[str, Any]] = []
|
|
532
|
+
for req in self._requests_by_session(token):
|
|
533
|
+
if req.match_info.handler == self.handle_v1_metrics:
|
|
534
|
+
metrics_data = self._decode_v1_metrics(req)
|
|
535
|
+
metrics.append(metrics_data)
|
|
536
|
+
return metrics
|
|
537
|
+
|
|
489
538
|
async def _integration_requests_by_session(
|
|
490
539
|
self,
|
|
491
540
|
token: Optional[str],
|
|
@@ -554,10 +603,30 @@ class Agent:
|
|
|
554
603
|
raw_data = self._request_data(request)
|
|
555
604
|
return trace_decode_v07(raw_data)
|
|
556
605
|
|
|
606
|
+
def _decode_v1_traces(self, request: Request) -> v04TracePayload:
|
|
607
|
+
raw_data = self._request_data(request)
|
|
608
|
+
return trace_decode_v1(raw_data)
|
|
609
|
+
|
|
557
610
|
def _decode_v06_tracestats(self, request: Request) -> v06StatsPayload:
|
|
558
611
|
raw_data = self._request_data(request)
|
|
559
612
|
return tracestats_decode_v06(raw_data)
|
|
560
613
|
|
|
614
|
+
def _decode_v1_logs(self, request: Request) -> Dict[str, Any]:
|
|
615
|
+
raw_data = self._request_data(request)
|
|
616
|
+
content_type = request.headers.get("Content-Type", "").lower().strip()
|
|
617
|
+
try:
|
|
618
|
+
return decode_logs_request(raw_data, content_type)
|
|
619
|
+
except Exception as e:
|
|
620
|
+
raise web.HTTPBadRequest(text=str(e))
|
|
621
|
+
|
|
622
|
+
def _decode_v1_metrics(self, request: Request) -> Dict[str, Any]:
|
|
623
|
+
raw_data = self._request_data(request)
|
|
624
|
+
content_type = request.headers.get("Content-Type", "").lower().strip()
|
|
625
|
+
try:
|
|
626
|
+
return decode_metrics_request(raw_data, content_type)
|
|
627
|
+
except Exception as e:
|
|
628
|
+
raise web.HTTPBadRequest(text=str(e))
|
|
629
|
+
|
|
561
630
|
async def handle_v04_traces(self, request: Request) -> web.Response:
|
|
562
631
|
return await self._handle_traces(request, version="v0.4")
|
|
563
632
|
|
|
@@ -567,6 +636,9 @@ class Agent:
|
|
|
567
636
|
async def handle_v07_traces(self, request: Request) -> web.Response:
|
|
568
637
|
return await self._handle_traces(request, version="v0.7")
|
|
569
638
|
|
|
639
|
+
async def handle_v1_traces(self, request: Request) -> web.Response:
|
|
640
|
+
return await self._handle_traces(request, version="v1")
|
|
641
|
+
|
|
570
642
|
async def handle_v06_tracestats(self, request: Request) -> web.Response:
|
|
571
643
|
stats = self._decode_v06_tracestats(request)
|
|
572
644
|
nstats = len(stats["Stats"])
|
|
@@ -581,6 +653,36 @@ class Agent:
|
|
|
581
653
|
log.info("received /v0.1/pipeline_stats payload")
|
|
582
654
|
return web.HTTPOk()
|
|
583
655
|
|
|
656
|
+
async def handle_v1_logs(self, request: Request) -> web.Response:
|
|
657
|
+
logs_data = self._decode_v1_logs(request)
|
|
658
|
+
num_resource_logs = len(logs_data.get("resource_logs", []))
|
|
659
|
+
total_log_records = sum(
|
|
660
|
+
len(scope_log.get("log_records", []))
|
|
661
|
+
for resource_log in logs_data.get("resource_logs", [])
|
|
662
|
+
for scope_log in resource_log.get("scope_logs", [])
|
|
663
|
+
)
|
|
664
|
+
log.info(
|
|
665
|
+
"received /v1/logs payload with %r resource log(s) containing %r log record(s)",
|
|
666
|
+
num_resource_logs,
|
|
667
|
+
total_log_records,
|
|
668
|
+
)
|
|
669
|
+
return web.HTTPOk()
|
|
670
|
+
|
|
671
|
+
async def handle_v1_metrics(self, request: Request) -> web.Response:
|
|
672
|
+
metrics_data = self._decode_v1_metrics(request)
|
|
673
|
+
num_resource_metrics = len(metrics_data.get("resource_metrics", []))
|
|
674
|
+
total_metrics = sum(
|
|
675
|
+
len(scope_metric.get("metrics", []))
|
|
676
|
+
for resource_metric in metrics_data.get("resource_metrics", [])
|
|
677
|
+
for scope_metric in resource_metric.get("scope_metrics", [])
|
|
678
|
+
)
|
|
679
|
+
log.info(
|
|
680
|
+
"received /v1/metrics payload with %r resource metric(s) containing %r metric(s)",
|
|
681
|
+
num_resource_metrics,
|
|
682
|
+
total_metrics,
|
|
683
|
+
)
|
|
684
|
+
return web.HTTPOk()
|
|
685
|
+
|
|
584
686
|
async def handle_v07_remoteconfig(self, request: Request) -> web.Response:
|
|
585
687
|
"""Emulates Remote Config endpoint: /v0.7/config"""
|
|
586
688
|
token = _session_token(request)
|
|
@@ -732,7 +834,7 @@ class Agent:
|
|
|
732
834
|
headers={"Datadog-Agent-State": "03e868b3ecdd62a91423cc4c3917d0d151fb9fa486736911ab7f5a0750c63824"},
|
|
733
835
|
)
|
|
734
836
|
|
|
735
|
-
async def _handle_traces(self, request: Request, version: Literal["v0.4", "v0.5", "v0.7"]) -> web.Response:
|
|
837
|
+
async def _handle_traces(self, request: Request, version: Literal["v0.4", "v0.5", "v0.7", "v1"]) -> web.Response:
|
|
736
838
|
token = request["session_token"]
|
|
737
839
|
checks: Checks = request.app["checks"]
|
|
738
840
|
headers = request.headers
|
|
@@ -753,6 +855,8 @@ class Agent:
|
|
|
753
855
|
traces = self._decode_v05_traces(request)
|
|
754
856
|
elif version == "v0.7":
|
|
755
857
|
traces = self._decode_v07_traces(request)
|
|
858
|
+
elif version == "v1":
|
|
859
|
+
traces = self._decode_v1_traces(request)
|
|
756
860
|
log.info(
|
|
757
861
|
"received trace for token %r payload with %r trace chunks",
|
|
758
862
|
token,
|
|
@@ -941,6 +1045,16 @@ class Agent:
|
|
|
941
1045
|
stats = await self._tracestats_by_session(token)
|
|
942
1046
|
return web.json_response(stats)
|
|
943
1047
|
|
|
1048
|
+
async def handle_session_logs(self, request: Request) -> web.Response:
|
|
1049
|
+
token = request["session_token"]
|
|
1050
|
+
logs = await self._logs_by_session(token)
|
|
1051
|
+
return web.json_response(logs)
|
|
1052
|
+
|
|
1053
|
+
async def handle_session_metrics(self, request: Request) -> web.Response:
|
|
1054
|
+
token = request["session_token"]
|
|
1055
|
+
metrics = await self._metrics_by_session(token)
|
|
1056
|
+
return web.json_response(metrics)
|
|
1057
|
+
|
|
944
1058
|
async def handle_session_requests(self, request: Request) -> web.Response:
|
|
945
1059
|
token = request["session_token"]
|
|
946
1060
|
resp = []
|
|
@@ -957,6 +1071,8 @@ class Agent:
|
|
|
957
1071
|
self.handle_v1_tracer_flare,
|
|
958
1072
|
self.handle_evp_proxy_v2_api_v2_llmobs,
|
|
959
1073
|
self.handle_evp_proxy_v2_llmobs_eval_metric,
|
|
1074
|
+
self.handle_v1_logs,
|
|
1075
|
+
self.handle_v1_metrics,
|
|
960
1076
|
):
|
|
961
1077
|
continue
|
|
962
1078
|
resp.append(
|
|
@@ -1174,6 +1290,59 @@ class Agent:
|
|
|
1174
1290
|
return response
|
|
1175
1291
|
|
|
1176
1292
|
|
|
1293
|
+
def make_otlp_http_app(agent: Agent) -> web.Application:
|
|
1294
|
+
"""Create a separate HTTP application for OTLP endpoints using the shared agent instance."""
|
|
1295
|
+
|
|
1296
|
+
@middleware
|
|
1297
|
+
async def otlp_store_request_middleware(request: Request, handler: _Handler) -> web.Response:
|
|
1298
|
+
# Always store requests for OTLP endpoints
|
|
1299
|
+
await agent._store_request(request)
|
|
1300
|
+
return await handler(request)
|
|
1301
|
+
|
|
1302
|
+
app = web.Application(
|
|
1303
|
+
middlewares=[
|
|
1304
|
+
otlp_store_request_middleware, # type: ignore
|
|
1305
|
+
session_token_middleware, # type: ignore
|
|
1306
|
+
],
|
|
1307
|
+
)
|
|
1308
|
+
|
|
1309
|
+
# Add only OTLP HTTP endpoints
|
|
1310
|
+
app.add_routes(
|
|
1311
|
+
[
|
|
1312
|
+
web.post(LOGS_ENDPOINT, agent.handle_v1_logs),
|
|
1313
|
+
web.post(METRICS_ENDPOINT, agent.handle_v1_metrics),
|
|
1314
|
+
web.get("/test/session/requests", agent.handle_session_requests),
|
|
1315
|
+
web.get("/test/session/logs", agent.handle_session_logs),
|
|
1316
|
+
web.get("/test/session/metrics", agent.handle_session_metrics),
|
|
1317
|
+
web.get("/test/session/clear", agent.handle_session_clear),
|
|
1318
|
+
web.get("/test/session/start", agent.handle_session_start),
|
|
1319
|
+
]
|
|
1320
|
+
)
|
|
1321
|
+
|
|
1322
|
+
return app
|
|
1323
|
+
|
|
1324
|
+
|
|
1325
|
+
async def make_otlp_grpc_server_async(agent: Agent, http_port: int, grpc_port: int) -> Any:
|
|
1326
|
+
"""Create and start a separate GRPC server for OTLP endpoints that forwards to HTTP server."""
|
|
1327
|
+
# Define the servicer class only when GRPC is available
|
|
1328
|
+
server = grpc_aio.server()
|
|
1329
|
+
|
|
1330
|
+
# Add the OTLP logs servicer
|
|
1331
|
+
logs_servicer = OTLPLogsGRPCServicer(http_port)
|
|
1332
|
+
add_LogsServiceServicer_to_server(logs_servicer, server)
|
|
1333
|
+
|
|
1334
|
+
# Add the OTLP metrics servicer
|
|
1335
|
+
metrics_servicer = OTLPMetricsGRPCServicer(http_port)
|
|
1336
|
+
add_MetricsServiceServicer_to_server(metrics_servicer, server)
|
|
1337
|
+
|
|
1338
|
+
# Setup and start the server
|
|
1339
|
+
listen_addr = f"[::]:{grpc_port}"
|
|
1340
|
+
server.add_insecure_port(listen_addr)
|
|
1341
|
+
await server.start()
|
|
1342
|
+
|
|
1343
|
+
return server
|
|
1344
|
+
|
|
1345
|
+
|
|
1177
1346
|
def make_app(
|
|
1178
1347
|
enabled_checks: List[str],
|
|
1179
1348
|
log_span_fmt: str,
|
|
@@ -1209,6 +1378,8 @@ def make_app(
|
|
|
1209
1378
|
web.put("/v0.5/traces", agent.handle_v05_traces),
|
|
1210
1379
|
web.post("/v0.7/traces", agent.handle_v07_traces),
|
|
1211
1380
|
web.put("/v0.7/traces", agent.handle_v07_traces),
|
|
1381
|
+
web.post("/v1.0/traces", agent.handle_v1_traces),
|
|
1382
|
+
web.put("/v1.0/traces", agent.handle_v1_traces),
|
|
1212
1383
|
web.post("/v0.6/stats", agent.handle_v06_tracestats),
|
|
1213
1384
|
web.post("/v0.1/pipeline_stats", agent.handle_v01_pipelinestats),
|
|
1214
1385
|
web.put("/v0.6/stats", agent.handle_v06_tracestats),
|
|
@@ -1262,6 +1433,7 @@ def make_app(
|
|
|
1262
1433
|
],
|
|
1263
1434
|
enabled=enabled_checks,
|
|
1264
1435
|
)
|
|
1436
|
+
app["agent"] = agent
|
|
1265
1437
|
app["checks"] = checks
|
|
1266
1438
|
app["snapshot_dir"] = snapshot_dir
|
|
1267
1439
|
app["snapshot_ci_mode"] = snapshot_ci_mode
|
|
@@ -1293,6 +1465,18 @@ def main(args: Optional[List[str]] = None) -> None:
|
|
|
1293
1465
|
help="Print version info and exit.",
|
|
1294
1466
|
)
|
|
1295
1467
|
parser.add_argument("-p", "--port", type=int, default=int(os.environ.get("PORT", 8126)))
|
|
1468
|
+
parser.add_argument(
|
|
1469
|
+
"--otlp-http-port",
|
|
1470
|
+
type=int,
|
|
1471
|
+
default=int(os.environ.get("OTLP_HTTP_PORT", 4318)),
|
|
1472
|
+
help="Port to listen for OTLP HTTP requests (default: 4318)",
|
|
1473
|
+
)
|
|
1474
|
+
parser.add_argument(
|
|
1475
|
+
"--otlp-grpc-port",
|
|
1476
|
+
type=int,
|
|
1477
|
+
default=int(os.environ.get("OTLP_GRPC_PORT", 4317)),
|
|
1478
|
+
help="Port to listen for OTLP GRPC requests (default: 4317)",
|
|
1479
|
+
)
|
|
1296
1480
|
parser.add_argument(
|
|
1297
1481
|
"--snapshot-dir",
|
|
1298
1482
|
type=str,
|
|
@@ -1444,7 +1628,60 @@ def main(args: Optional[List[str]] = None) -> None:
|
|
|
1444
1628
|
vcr_cassettes_directory=parsed_args.vcr_cassettes_directory,
|
|
1445
1629
|
)
|
|
1446
1630
|
|
|
1447
|
-
|
|
1631
|
+
# Validate port configuration
|
|
1632
|
+
if parsed_args.port == parsed_args.otlp_http_port:
|
|
1633
|
+
raise ValueError("APM and OTLP HTTP ports cannot be the same")
|
|
1634
|
+
if parsed_args.port == parsed_args.otlp_grpc_port:
|
|
1635
|
+
raise ValueError("APM and OTLP GRPC ports cannot be the same")
|
|
1636
|
+
if parsed_args.otlp_http_port == parsed_args.otlp_grpc_port:
|
|
1637
|
+
raise ValueError("OTLP HTTP and GRPC ports cannot be the same")
|
|
1638
|
+
|
|
1639
|
+
# Get the shared agent instance from the main app
|
|
1640
|
+
agent = app["agent"]
|
|
1641
|
+
otlp_http_app = make_otlp_http_app(agent)
|
|
1642
|
+
|
|
1643
|
+
async def run_servers():
|
|
1644
|
+
"""Run APM and OTLP HTTP servers concurrently."""
|
|
1645
|
+
# Create runners for both apps
|
|
1646
|
+
apm_runner = web.AppRunner(app)
|
|
1647
|
+
await apm_runner.setup()
|
|
1648
|
+
|
|
1649
|
+
otlp_http_runner = web.AppRunner(otlp_http_app)
|
|
1650
|
+
await otlp_http_runner.setup()
|
|
1651
|
+
|
|
1652
|
+
# Start GRPC server if available (async creation)
|
|
1653
|
+
otlp_grpc_server = await make_otlp_grpc_server_async(
|
|
1654
|
+
agent, parsed_args.otlp_http_port, parsed_args.otlp_grpc_port
|
|
1655
|
+
)
|
|
1656
|
+
|
|
1657
|
+
# Create sites for both apps
|
|
1658
|
+
if apm_sock:
|
|
1659
|
+
apm_site = web.SockSite(apm_runner, apm_sock)
|
|
1660
|
+
else:
|
|
1661
|
+
apm_site = web.TCPSite(apm_runner, port=parsed_args.port)
|
|
1662
|
+
|
|
1663
|
+
otlp_http_site = web.TCPSite(otlp_http_runner, port=parsed_args.otlp_http_port)
|
|
1664
|
+
|
|
1665
|
+
# Start both servers concurrently
|
|
1666
|
+
await asyncio.gather(apm_site.start(), otlp_http_site.start())
|
|
1667
|
+
|
|
1668
|
+
print(f"======== Running APM server on port {parsed_args.port} ========")
|
|
1669
|
+
print(f"======== Running OTLP HTTP server on port {parsed_args.otlp_http_port} ========")
|
|
1670
|
+
print(f"======== Running OTLP GRPC server on port {parsed_args.otlp_grpc_port} ========")
|
|
1671
|
+
print("(Press CTRL+C to quit)")
|
|
1672
|
+
|
|
1673
|
+
try:
|
|
1674
|
+
# Keep the servers running
|
|
1675
|
+
await asyncio.Event().wait()
|
|
1676
|
+
except KeyboardInterrupt:
|
|
1677
|
+
pass
|
|
1678
|
+
finally:
|
|
1679
|
+
await apm_runner.cleanup()
|
|
1680
|
+
await otlp_http_runner.cleanup()
|
|
1681
|
+
await otlp_grpc_server.stop(grace=5.0)
|
|
1682
|
+
|
|
1683
|
+
# Run the servers
|
|
1684
|
+
asyncio.run(run_servers())
|
|
1448
1685
|
|
|
1449
1686
|
|
|
1450
1687
|
if __name__ == "__main__":
|
ddapm_test_agent/client.py
CHANGED
|
@@ -9,7 +9,7 @@ import requests
|
|
|
9
9
|
from ddapm_test_agent.trace import Trace
|
|
10
10
|
|
|
11
11
|
|
|
12
|
-
class
|
|
12
|
+
class TestClient:
|
|
13
13
|
__test__ = False
|
|
14
14
|
|
|
15
15
|
def __init__(self, base_url: str):
|
|
@@ -19,6 +19,28 @@ class TestAgentClient:
|
|
|
19
19
|
def _url(self, path: str) -> str:
|
|
20
20
|
return urllib.parse.urljoin(self._base_url, path)
|
|
21
21
|
|
|
22
|
+
def requests(self, **kwargs: Any) -> List[Any]:
|
|
23
|
+
resp = self._session.get(self._url("/test/session/requests"), **kwargs)
|
|
24
|
+
json = resp.json()
|
|
25
|
+
return cast(List[Any], json)
|
|
26
|
+
|
|
27
|
+
def clear(self, **kwargs: Any) -> None:
|
|
28
|
+
self._session.get(self._url("/test/session/clear"), **kwargs)
|
|
29
|
+
|
|
30
|
+
def wait_to_start(self, num_tries: int = 50, delay: float = 0.1) -> None:
|
|
31
|
+
exc = []
|
|
32
|
+
for i in range(num_tries):
|
|
33
|
+
try:
|
|
34
|
+
self.requests()
|
|
35
|
+
except requests.exceptions.RequestException as e:
|
|
36
|
+
exc.append(e)
|
|
37
|
+
time.sleep(delay)
|
|
38
|
+
else:
|
|
39
|
+
return
|
|
40
|
+
raise AssertionError(f"Test agent did not start in time ({num_tries * delay} seconds). Got {exc[-1]}")
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class TestAgentClient(TestClient):
|
|
22
44
|
def traces(self, clear: bool = False, **kwargs: Any) -> List[Trace]:
|
|
23
45
|
resp = self._session.get(self._url("/test/session/traces"), **kwargs)
|
|
24
46
|
if clear:
|
|
@@ -26,11 +48,6 @@ class TestAgentClient:
|
|
|
26
48
|
json = resp.json()
|
|
27
49
|
return cast(List[Trace], json)
|
|
28
50
|
|
|
29
|
-
def requests(self, **kwargs: Any) -> List[Any]:
|
|
30
|
-
resp = self._session.get(self._url("/test/session/requests"), **kwargs)
|
|
31
|
-
json = resp.json()
|
|
32
|
-
return cast(List[Any], json)
|
|
33
|
-
|
|
34
51
|
def raw_telemetry(self, clear: bool = False) -> List[Any]:
|
|
35
52
|
raw_reqs = self.requests()
|
|
36
53
|
reqs = []
|
|
@@ -47,9 +64,6 @@ class TestAgentClient:
|
|
|
47
64
|
self.clear()
|
|
48
65
|
return cast(List[Any], resp.json())
|
|
49
66
|
|
|
50
|
-
def clear(self, **kwargs: Any) -> None:
|
|
51
|
-
self._session.get(self._url("/test/session/clear"), **kwargs)
|
|
52
|
-
|
|
53
67
|
def info(self, **kwargs):
|
|
54
68
|
resp = self._session.get(self._url("/info"), **kwargs)
|
|
55
69
|
json = resp.json()
|
|
@@ -126,14 +140,43 @@ class TestAgentClient:
|
|
|
126
140
|
time.sleep(0.01)
|
|
127
141
|
raise AssertionError("Telemetry event %r not found" % event_name)
|
|
128
142
|
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
143
|
+
|
|
144
|
+
class TestOTLPClient(TestClient):
|
|
145
|
+
def __init__(self, host: str = "127.0.0.1", http_port: int = 4318, scheme: str = "http"):
|
|
146
|
+
# OTLP grpc server will forward all requests to the http server
|
|
147
|
+
# so we can use the same client to receive logs for both http and grpc endpoints
|
|
148
|
+
super().__init__(f"{scheme}://{host}:{http_port}")
|
|
149
|
+
|
|
150
|
+
def logs(self, clear: bool = False, **kwargs: Any) -> List[Any]:
|
|
151
|
+
resp = self._session.get(self._url("/test/session/logs"), **kwargs)
|
|
152
|
+
if clear:
|
|
153
|
+
self.clear()
|
|
154
|
+
return cast(List[Any], resp.json())
|
|
155
|
+
|
|
156
|
+
def wait_for_num_logs(self, num: int, clear: bool = False, wait_loops: int = 30) -> List[Any]:
|
|
157
|
+
"""Wait for `num` logs to be received from the test agent."""
|
|
158
|
+
for _ in range(wait_loops):
|
|
159
|
+
logs = self.logs(clear=False)
|
|
160
|
+
if len(logs) == num:
|
|
161
|
+
if clear:
|
|
162
|
+
self.clear()
|
|
163
|
+
return logs
|
|
164
|
+
time.sleep(0.1)
|
|
165
|
+
raise ValueError("Number (%r) of logs not available from test agent, got %r" % (num, len(logs)))
|
|
166
|
+
|
|
167
|
+
def metrics(self, clear: bool = False, **kwargs: Any) -> List[Any]:
|
|
168
|
+
resp = self._session.get(self._url("/test/session/metrics"), **kwargs)
|
|
169
|
+
if clear:
|
|
170
|
+
self.clear()
|
|
171
|
+
return cast(List[Any], resp.json())
|
|
172
|
+
|
|
173
|
+
def wait_for_num_metrics(self, num: int, clear: bool = False, wait_loops: int = 30) -> List[Any]:
|
|
174
|
+
"""Wait for `num` metrics to be received from the test agent."""
|
|
175
|
+
for _ in range(wait_loops):
|
|
176
|
+
metrics = self.metrics(clear=False)
|
|
177
|
+
if len(metrics) == num:
|
|
178
|
+
if clear:
|
|
179
|
+
self.clear()
|
|
180
|
+
return metrics
|
|
181
|
+
time.sleep(0.1)
|
|
182
|
+
raise ValueError("Number (%r) of metrics not available from test agent, got %r" % (num, len(metrics)))
|
ddapm_test_agent/logs.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"""OTLP Logs handling for the test agent."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
from typing import Any
|
|
6
|
+
from typing import Dict
|
|
7
|
+
|
|
8
|
+
from aiohttp import ClientSession
|
|
9
|
+
from google.protobuf.json_format import MessageToDict
|
|
10
|
+
from grpc import aio as grpc_aio
|
|
11
|
+
from opentelemetry.proto.collector.logs.v1.logs_service_pb2 import ExportLogsServiceRequest
|
|
12
|
+
from opentelemetry.proto.collector.logs.v1.logs_service_pb2 import ExportLogsServiceResponse
|
|
13
|
+
from opentelemetry.proto.collector.logs.v1.logs_service_pb2_grpc import LogsServiceServicer
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
LOGS_ENDPOINT = "/v1/logs"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
log = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def decode_logs_request(request_body: bytes, content_type: str) -> Dict[str, Any]:
|
|
23
|
+
if content_type == "application/json":
|
|
24
|
+
parsed_json = json.loads(request_body)
|
|
25
|
+
if not isinstance(parsed_json, dict):
|
|
26
|
+
raise Exception("JSON payload must be an object")
|
|
27
|
+
return parsed_json
|
|
28
|
+
elif content_type == "application/x-protobuf":
|
|
29
|
+
export_request = ExportLogsServiceRequest()
|
|
30
|
+
export_request.ParseFromString(request_body)
|
|
31
|
+
return protobuf_to_dict(export_request)
|
|
32
|
+
else:
|
|
33
|
+
raise ValueError(f"Content-Type must be application/x-protobuf or application/json, got {content_type}")
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def protobuf_to_dict(pb_obj: Any) -> Dict[str, Any]:
|
|
37
|
+
return MessageToDict(pb_obj, preserving_proto_field_name=True)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class OTLPLogsGRPCServicer(LogsServiceServicer):
|
|
41
|
+
|
|
42
|
+
def __init__(self, http_port: int):
|
|
43
|
+
self.http_url = f"http://127.0.0.1:{http_port}"
|
|
44
|
+
|
|
45
|
+
async def Export(
|
|
46
|
+
self, request: ExportLogsServiceRequest, context: grpc_aio.ServicerContext
|
|
47
|
+
) -> ExportLogsServiceResponse:
|
|
48
|
+
try:
|
|
49
|
+
protobuf_data = request.SerializeToString()
|
|
50
|
+
headers = {"Content-Type": "application/x-protobuf"}
|
|
51
|
+
metadata = dict(context.invocation_metadata())
|
|
52
|
+
if "session-token" in metadata:
|
|
53
|
+
headers["Session-Token"] = metadata["session-token"]
|
|
54
|
+
async with ClientSession(self.http_url) as session:
|
|
55
|
+
async with session.post(LOGS_ENDPOINT, headers=headers, data=protobuf_data) as resp:
|
|
56
|
+
context.set_trailing_metadata([("http-status", str(resp.status))])
|
|
57
|
+
response = ExportLogsServiceResponse()
|
|
58
|
+
if resp.status >= 400:
|
|
59
|
+
response.partial_success.rejected_log_records = len(request.resource_logs)
|
|
60
|
+
response.partial_success.error_message = f"HTTP {resp.status}: {await resp.text()}"
|
|
61
|
+
return response
|
|
62
|
+
except Exception as e:
|
|
63
|
+
context.set_trailing_metadata([("http-status", "500"), ("error", str(e))])
|
|
64
|
+
response = ExportLogsServiceResponse()
|
|
65
|
+
response.partial_success.rejected_log_records = len(request.resource_logs)
|
|
66
|
+
response.partial_success.error_message = f"Forward failed: {str(e)}"
|
|
67
|
+
return response
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
"""OTLP Metrics handling for the test agent."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
from typing import Any
|
|
6
|
+
from typing import Dict
|
|
7
|
+
|
|
8
|
+
from aiohttp import ClientSession
|
|
9
|
+
from google.protobuf.json_format import MessageToDict
|
|
10
|
+
from grpc import aio as grpc_aio
|
|
11
|
+
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import ExportMetricsServiceRequest
|
|
12
|
+
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import ExportMetricsServiceResponse
|
|
13
|
+
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2_grpc import MetricsServiceServicer
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
METRICS_ENDPOINT = "/v1/metrics"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
log = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def decode_metrics_request(request_body: bytes, content_type: str) -> Dict[str, Any]:
|
|
23
|
+
if content_type == "application/json":
|
|
24
|
+
parsed_json = json.loads(request_body)
|
|
25
|
+
if not isinstance(parsed_json, dict):
|
|
26
|
+
raise Exception("JSON payload must be an object")
|
|
27
|
+
return parsed_json
|
|
28
|
+
elif content_type == "application/x-protobuf":
|
|
29
|
+
export_request = ExportMetricsServiceRequest()
|
|
30
|
+
export_request.ParseFromString(request_body)
|
|
31
|
+
return protobuf_to_dict(export_request)
|
|
32
|
+
else:
|
|
33
|
+
raise ValueError(f"Content-Type must be application/x-protobuf or application/json, got {content_type}")
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def protobuf_to_dict(pb_obj: Any) -> Dict[str, Any]:
|
|
37
|
+
return MessageToDict(pb_obj, preserving_proto_field_name=True)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class OTLPMetricsGRPCServicer(MetricsServiceServicer):
|
|
41
|
+
|
|
42
|
+
def __init__(self, http_port: int):
|
|
43
|
+
self.http_url = f"http://127.0.0.1:{http_port}"
|
|
44
|
+
|
|
45
|
+
def _count_data_points(self, request: ExportMetricsServiceRequest) -> int:
|
|
46
|
+
return len(
|
|
47
|
+
[
|
|
48
|
+
dp
|
|
49
|
+
for rm in request.resource_metrics
|
|
50
|
+
for sm in rm.scope_metrics
|
|
51
|
+
for m in sm.metrics
|
|
52
|
+
for dp in (
|
|
53
|
+
m.gauge.data_points
|
|
54
|
+
if m.HasField("gauge")
|
|
55
|
+
else (
|
|
56
|
+
m.sum.data_points
|
|
57
|
+
if m.HasField("sum")
|
|
58
|
+
else (
|
|
59
|
+
m.histogram.data_points
|
|
60
|
+
if m.HasField("histogram")
|
|
61
|
+
else (
|
|
62
|
+
m.exponential_histogram.data_points
|
|
63
|
+
if m.HasField("exponential_histogram")
|
|
64
|
+
else m.summary.data_points if m.HasField("summary") else []
|
|
65
|
+
)
|
|
66
|
+
)
|
|
67
|
+
)
|
|
68
|
+
)
|
|
69
|
+
]
|
|
70
|
+
)
|
|
71
|
+
|
|
72
|
+
async def Export(
|
|
73
|
+
self, request: ExportMetricsServiceRequest, context: grpc_aio.ServicerContext
|
|
74
|
+
) -> ExportMetricsServiceResponse:
|
|
75
|
+
try:
|
|
76
|
+
protobuf_data = request.SerializeToString()
|
|
77
|
+
headers = {"Content-Type": "application/x-protobuf"}
|
|
78
|
+
metadata = dict(context.invocation_metadata())
|
|
79
|
+
if "session-token" in metadata:
|
|
80
|
+
headers["Session-Token"] = metadata["session-token"]
|
|
81
|
+
async with ClientSession(self.http_url) as session:
|
|
82
|
+
async with session.post(METRICS_ENDPOINT, headers=headers, data=protobuf_data) as resp:
|
|
83
|
+
context.set_trailing_metadata([("http-status", str(resp.status))])
|
|
84
|
+
response = ExportMetricsServiceResponse()
|
|
85
|
+
if resp.status >= 400:
|
|
86
|
+
response.partial_success.rejected_data_points = self._count_data_points(request)
|
|
87
|
+
response.partial_success.error_message = f"HTTP {resp.status}: {await resp.text()}"
|
|
88
|
+
return response
|
|
89
|
+
except Exception as e:
|
|
90
|
+
context.set_trailing_metadata([("http-status", "500"), ("error", str(e))])
|
|
91
|
+
response = ExportMetricsServiceResponse()
|
|
92
|
+
response.partial_success.rejected_data_points = self._count_data_points(request)
|
|
93
|
+
response.partial_success.error_message = f"Forward failed: {str(e)}"
|
|
94
|
+
return response
|
ddapm_test_agent/trace.py
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
"""Tracing specific functions and types"""
|
|
2
2
|
|
|
3
|
+
from enum import IntEnum
|
|
3
4
|
import json
|
|
4
5
|
from typing import Any
|
|
5
6
|
from typing import Callable
|
|
@@ -114,6 +115,59 @@ v04TracePayload = List[List[Span]]
|
|
|
114
115
|
TraceMap = OrderedDict[int, Trace]
|
|
115
116
|
|
|
116
117
|
|
|
118
|
+
class V1ChunkKeys(IntEnum):
|
|
119
|
+
PRIORITY = 1
|
|
120
|
+
ORIGIN = 2
|
|
121
|
+
ATTRIBUTES = 3
|
|
122
|
+
SPANS = 4
|
|
123
|
+
DROPPED_TRACE = 5
|
|
124
|
+
TRACE_ID = 6
|
|
125
|
+
SAMPLING_MECHANISM = 7
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
class V1SpanKeys(IntEnum):
    """Integer map keys of a span in the v1 msgpack trace format."""

    SERVICE = 1
    NAME = 2
    RESOURCE = 3
    SPAN_ID = 4
    PARENT_ID = 5
    START = 6
    DURATION = 7
    ERROR = 8
    ATTRIBUTES = 9
    TYPE = 10
    SPAN_LINKS = 11
    SPAN_EVENTS = 12
    ENV = 13
    VERSION = 14
    COMPONENT = 15
    SPAN_KIND = 16
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
class V1SpanLinkKeys(IntEnum):
    """Integer map keys of a span link in the v1 msgpack trace format."""

    TRACE_ID = 1
    SPAN_ID = 2
    ATTRIBUTES = 3
    TRACE_STATE = 4
    FLAGS = 5
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
class V1SpanEventKeys(IntEnum):
    """Integer map keys of a span event in the v1 msgpack trace format."""

    TIME = 1
    NAME = 2
    ATTRIBUTES = 3
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
class V1AnyValueKeys(IntEnum):
    """Type discriminators for v1 attribute values (OTLP AnyValue style)."""

    STRING = 1
    BOOL = 2
    DOUBLE = 3
    INT = 4
    BYTES = 5
    ARRAY = 6
    KEY_VALUE_LIST = 7
|
|
169
|
+
|
|
170
|
+
|
|
117
171
|
# TODO:ban add extra tags to add to the span
|
|
118
172
|
# TODO:ban warn about dropping metastruct
|
|
119
173
|
def verify_span(d: Any) -> Span:
|
|
@@ -702,6 +756,351 @@ def decode_v07(data: bytes) -> v04TracePayload:
|
|
|
702
756
|
return _verify_v07_payload(payload)
|
|
703
757
|
|
|
704
758
|
|
|
759
|
+
def decode_v1(data: bytes) -> v04TracePayload:
    """Decode a v1 trace payload into the v0.4 representation.

    Compared to v0.7 the v1 wire format:
    - deduplicates strings via a streamed string table (values refer to
      previously seen strings by index)
    - sends 128-bit trace IDs as a 16-byte array
    - replaces 'meta'/'metrics' with typed 'attributes', OTLP-style
    """
    return _convert_v1_payload(msgpack.unpackb(data, strict_map_key=False))
|
|
768
|
+
|
|
769
|
+
|
|
770
|
+
def _get_and_add_string(string_table: List[str], value: Union[int, str]) -> str:
|
|
771
|
+
if isinstance(value, str):
|
|
772
|
+
string_table.append(value)
|
|
773
|
+
return value
|
|
774
|
+
elif isinstance(value, int):
|
|
775
|
+
if value >= len(string_table) or value < 0:
|
|
776
|
+
raise ValueError(f"Value {value} is out of range for string table of length {len(string_table)}")
|
|
777
|
+
return string_table[value]
|
|
778
|
+
|
|
779
|
+
|
|
780
|
+
def _convert_v1_payload(data: Any) -> v04TracePayload:
    """Convert a decoded v1 payload map into a list of v0.4 traces."""
    if not isinstance(data, dict):
        raise TypeError("Trace payload must be a map, got type %r." % type(data))

    # Index 0 of the string table is reserved for the empty string.
    strings: List[str] = [""]
    traces: List[List[Span]] = []

    for key, value in data.items():
        if key == 1:
            raise TypeError("Message pack representation of v1 trace payload must stream strings")
        if 1 < key < 10:
            # Keys 2-9 all carry strings; collect them into the string table.
            # TODO: In the future we can assert on these keys
            if isinstance(value, str):
                strings.append(value)
        elif key == 11:
            if not isinstance(value, list):
                raise TypeError("Trace payload 'chunks' (11) must be a list.")
            traces.extend(_convert_v1_chunk(chunk, strings) for chunk in value)
        else:
            raise TypeError("Unknown key %r in v1 trace payload" % key)
    return cast(v04TracePayload, traces)
|
|
803
|
+
|
|
804
|
+
|
|
805
|
+
def _convert_v1_chunk(chunk: Any, string_table: List[str]) -> List[Span]:
    """Convert a v1 trace chunk into a list of v0.4 spans.

    The v0.4 format has no chunk-level container, so chunk-level fields
    (priority, origin, trace ID, sampling mechanism and chunk attributes)
    are copied onto every span in the chunk after all spans are decoded.
    """
    if not isinstance(chunk, dict):
        raise TypeError("Chunk must be a map.")

    # "" doubles as the "unset" sentinel for priority/origin below.
    priority, origin, sampling_mechanism = "", "", None
    trace_id, trace_id_high = 0, 0
    meta: Dict[str, str] = {}
    metrics: Dict[str, MetricType] = {}
    spans: List[Span] = []
    for k, v in chunk.items():
        if k == V1ChunkKeys.PRIORITY:
            priority = v
        elif k == V1ChunkKeys.ORIGIN:
            origin = _get_and_add_string(string_table, v)
        elif k == V1ChunkKeys.ATTRIBUTES:
            if not isinstance(v, list):
                raise TypeError("Chunk Attributes must be a list, got type %r." % type(v))
            # Chunk attributes are split into string meta / numeric metrics.
            _convert_v1_attributes(v, meta, metrics, string_table)
        elif k == V1ChunkKeys.SPANS:
            if not isinstance(v, list):
                raise TypeError("Chunk 'spans'(4) must be a list.")
            for span in v:
                converted_span = _convert_v1_span(span, string_table)
                spans.append(converted_span)
        elif k == V1ChunkKeys.DROPPED_TRACE:
            raise TypeError("Tracers must not set the droppedTrace(5) flag.")
        elif k == V1ChunkKeys.TRACE_ID:
            if len(v) != 16:
                raise TypeError("Trace ID must be 16 bytes, got %r." % len(v))
            # trace_id is a 128 bit integer in a bytes array, so we need to get the last 64 bits
            trace_id = int.from_bytes(v[8:], "big")
            trace_id_high = int.from_bytes(v[:8], "big")
        elif k == V1ChunkKeys.SAMPLING_MECHANISM:
            sampling_mechanism = v
        else:
            raise TypeError("Unknown key %r in v1 trace chunk" % k)

    # Propagate chunk-level data onto each span, as v0.4 expects it per-span.
    for span in spans:
        if "metrics" not in span:
            span["metrics"] = {}
        if "meta" not in span:
            span["meta"] = {}
        span["trace_id"] = trace_id
        # High 64 bits of the 128-bit trace ID travel as a hex meta tag.
        span["meta"]["_dd.p.tid"] = hex(trace_id_high)
        if sampling_mechanism is not None:
            span["meta"]["_dd.p.dm"] = "-" + str(sampling_mechanism)
        if origin != "":
            span["meta"]["_dd.origin"] = origin
        if priority != "":
            span["metrics"]["_sampling_priority_v1"] = priority
        for k, v in meta.items():
            span["meta"][k] = v
        for k, v in metrics.items():
            span["metrics"][k] = v
    return spans
|
|
860
|
+
|
|
861
|
+
|
|
862
|
+
def _convert_v1_span(span: Any, string_table: List[str]) -> Span:
    """Convert a single v1 span map into its v0.4 ``Span`` form.

    String-valued fields are resolved through (and added to) the streamed
    string table. ``env``/``version``/``component``/span kind are top-level
    fields in v1 but live under ``meta`` in v0.4, so they are collected
    first and merged in at the end.
    """
    if not isinstance(span, dict):
        raise TypeError("Span must be a map.")

    # Create a regular dict first, then cast to TypedDict
    v4Span: Dict[str, Any] = {}
    # "" doubles as the "unset" sentinel for these meta-destined fields.
    env, version, component, spanKind = "", "", "", ""

    # NOTE(review): unlike chunk/link/event conversion, unknown span keys are
    # silently ignored here (no trailing else raising TypeError) — confirm
    # whether that is intentional.
    for k, v in span.items():
        if k == V1SpanKeys.SERVICE:
            v4Span["service"] = _get_and_add_string(string_table, v)
        elif k == V1SpanKeys.NAME:
            v4Span["name"] = _get_and_add_string(string_table, v)
        elif k == V1SpanKeys.RESOURCE:
            v4Span["resource"] = _get_and_add_string(string_table, v)
        elif k == V1SpanKeys.SPAN_ID:
            v4Span["span_id"] = v
        elif k == V1SpanKeys.PARENT_ID:
            v4Span["parent_id"] = v
        elif k == V1SpanKeys.START:
            v4Span["start"] = v
        elif k == V1SpanKeys.DURATION:
            v4Span["duration"] = v
        elif k == V1SpanKeys.ERROR:
            # v1 sends error as a bool; v0.4 represents it as 0/1.
            if not isinstance(v, bool):
                raise TypeError("Error must be a boolean, got type %r." % type(v))
            v4Span["error"] = 1 if v else 0
        elif k == V1SpanKeys.ATTRIBUTES:
            if not isinstance(v, list):
                raise TypeError("Attributes must be a list, got type %r." % type(v))
            meta: Dict[str, str] = {}
            metrics: Dict[str, MetricType] = {}
            _convert_v1_attributes(v, meta, metrics, string_table)
            v4Span["meta"] = meta
            v4Span["metrics"] = metrics
        elif k == V1SpanKeys.TYPE:
            v4Span["type"] = _get_and_add_string(string_table, v)
        elif k == V1SpanKeys.SPAN_LINKS:
            if not isinstance(v, list):
                raise TypeError("Span links must be a list, got type %r." % type(v))
            links: List[SpanLink] = []
            for raw_link in v:
                link = _convert_v1_span_link(raw_link, string_table)
                links.append(link)
            v4Span["span_links"] = links
        elif k == V1SpanKeys.SPAN_EVENTS:
            if not isinstance(v, list):
                raise TypeError("Span events must be a list, got type %r." % type(v))
            events: List[SpanEvent] = []
            for raw_event in v:
                event = _convert_v1_span_event(raw_event, string_table)
                events.append(event)
            v4Span["span_events"] = events
        elif k == V1SpanKeys.ENV:
            env = _get_and_add_string(string_table, v)
        elif k == V1SpanKeys.VERSION:
            version = _get_and_add_string(string_table, v)
        elif k == V1SpanKeys.COMPONENT:
            component = _get_and_add_string(string_table, v)
        elif k == V1SpanKeys.SPAN_KIND:
            # Numeric v1 span kinds map to v0.4 string kinds.
            if not isinstance(v, int):
                raise TypeError("Span kind must be an integer, got type %r." % type(v))
            if v == 1:
                spanKind = "internal"
            elif v == 2:
                spanKind = "server"
            elif v == 3:
                spanKind = "client"
            elif v == 4:
                spanKind = "producer"
            elif v == 5:
                spanKind = "consumer"
            else:
                raise TypeError("Unknown span kind %r." % v)

    # Merge the top-level v1 fields into 'meta' (creating it if the span had
    # no attributes).
    if "meta" not in v4Span or v4Span["meta"] is None:
        v4Span["meta"] = {}
    if env != "":
        v4Span["meta"]["env"] = env
    if version != "":
        v4Span["meta"]["version"] = version
    if component != "":
        v4Span["meta"]["component"] = component
    if spanKind != "":
        v4Span["meta"]["span.kind"] = spanKind

    # Cast to TypedDict
    return v4Span  # type: ignore
|
|
950
|
+
|
|
951
|
+
|
|
952
|
+
def _convert_v1_span_event(event: Any, string_table: List[str]) -> SpanEvent:
    """Convert a single v1 span event map into its v0.4 ``SpanEvent`` form."""
    if not isinstance(event, dict):
        raise TypeError("Span event must be a map, got type %r." % type(event))

    converted: Dict[str, Any] = {}
    for key, value in event.items():
        if key == V1SpanEventKeys.TIME:
            converted["time_unix_nano"] = value
        elif key == V1SpanEventKeys.NAME:
            converted["name"] = _get_and_add_string(string_table, value)
        elif key == V1SpanEventKeys.ATTRIBUTES:
            converted["attributes"] = _convert_v1_span_event_attributes(value, string_table)
        else:
            raise TypeError("Unknown key %r in v1 span event" % key)

    # The plain dict satisfies the SpanEvent TypedDict shape.
    return converted  # type: ignore
|
|
971
|
+
|
|
972
|
+
|
|
973
|
+
def _convert_v1_span_link(link: Any, string_table: List[str]) -> SpanLink:
    """Convert a single v1 span link map into its v0.4 ``SpanLink`` form."""
    if not isinstance(link, dict):
        raise TypeError("Span link must be a map, got type %r." % type(link))

    converted: Dict[str, Any] = {}
    for key, value in link.items():
        if key == V1SpanLinkKeys.TRACE_ID:
            if len(value) != 16:
                raise TypeError("Trace ID must be 16 bytes, got %r." % len(value))
            # The 128-bit trace ID splits into low 64 bits (trace_id) and
            # high 64 bits (trace_id_high).
            converted["trace_id"] = int.from_bytes(value[8:], "big")
            converted["trace_id_high"] = int.from_bytes(value[:8], "big")
        elif key == V1SpanLinkKeys.SPAN_ID:
            converted["span_id"] = value
        elif key == V1SpanLinkKeys.ATTRIBUTES:
            converted["attributes"] = _convert_v1_span_link_attributes(value, string_table)
        elif key == V1SpanLinkKeys.TRACE_STATE:
            converted["tracestate"] = _get_and_add_string(string_table, value)
        elif key == V1SpanLinkKeys.FLAGS:
            converted["flags"] = value
        else:
            raise TypeError("Unknown key %r in v1 span link" % key)

    # The plain dict satisfies the SpanLink TypedDict shape.
    return converted  # type: ignore
|
|
1000
|
+
|
|
1001
|
+
|
|
1002
|
+
def _convert_v1_span_link_attributes(attr: Any, string_table: List[str]) -> Dict[str, str]:
    """
    Convert v1 span link attributes into the flat v0.4 string mapping.

    Span links and span events use different v0.4 attribute encodings, so
    each has its own conversion helper.
    """
    if not isinstance(attr, list):
        raise TypeError("Attribute must be a list, got type %r." % type(attr))
    if len(attr) % 3 != 0:
        raise TypeError("Attribute list must have a multiple of 3 elements, got %r." % len(attr))

    result: Dict[str, str] = {}
    # Attributes are streamed as flat (key, type, value) triples.
    for key_ref, value_type, value in zip(attr[0::3], attr[1::3], attr[2::3]):
        key = _get_and_add_string(string_table, key_ref)
        if value_type == V1AnyValueKeys.STRING:
            result[key] = _get_and_add_string(string_table, value)
        elif value_type == V1AnyValueKeys.BOOL:
            result[key] = "true" if value else "false"
        elif value_type == V1AnyValueKeys.DOUBLE:
            result[key] = str(value)
        elif value_type == V1AnyValueKeys.INT:
            result[key] = str(value)
        elif value_type == V1AnyValueKeys.BYTES:
            raise NotImplementedError("Bytes values are not supported yet.")
        elif value_type == V1AnyValueKeys.ARRAY:
            raise NotImplementedError("Array of values are not supported yet.")
        elif value_type == V1AnyValueKeys.KEY_VALUE_LIST:
            raise NotImplementedError("Key value list values are not supported yet.")
        else:
            raise TypeError("Unknown attribute value type %r." % value_type)
    return result
|
|
1033
|
+
|
|
1034
|
+
|
|
1035
|
+
def _convert_v1_span_event_attributes(attr: Any, string_table: List[str]) -> Dict[str, Dict[str, Any]]:
    """
    Convert v1 span event attributes into the typed-value v0.4 mapping.

    Span links and span events use different v0.4 attribute encodings, so
    each has its own conversion helper.
    """
    if not isinstance(attr, list):
        raise TypeError("Attribute must be a list, got type %r." % type(attr))
    if len(attr) % 3 != 0:
        raise TypeError("Attribute list must have a multiple of 3 elements, got %r." % len(attr))

    result: Dict[str, Dict[str, Any]] = {}
    # Attributes are streamed as flat (key, type, value) triples.
    for idx in range(0, len(attr), 3):
        key = _get_and_add_string(string_table, attr[idx])
        value_type, value = attr[idx + 1], attr[idx + 2]
        typed: Dict[str, Any]
        if value_type == V1AnyValueKeys.STRING:
            typed = {"type": 0, "string_value": _get_and_add_string(string_table, value)}
        elif value_type == V1AnyValueKeys.BOOL:
            typed = {"type": 1, "bool_value": value}
        elif value_type == V1AnyValueKeys.DOUBLE:
            typed = {"type": 3, "double_value": value}
        elif value_type == V1AnyValueKeys.INT:
            # Yes the constants are different here: v0.4 uses 2 for int.
            typed = {"type": 2, "int_value": value}
        elif value_type == V1AnyValueKeys.BYTES:
            raise NotImplementedError("Bytes values are not supported yet.")
        elif value_type == V1AnyValueKeys.ARRAY:
            raise NotImplementedError("Array of strings values are not supported yet.")
        elif value_type == V1AnyValueKeys.KEY_VALUE_LIST:
            raise NotImplementedError("Key value list values are not supported yet.")
        else:
            raise TypeError("Unknown attribute value type %r." % value_type)
        result[key] = typed
    return result
|
|
1072
|
+
|
|
1073
|
+
|
|
1074
|
+
def _convert_v1_attributes(
    attr: Any, meta: Dict[str, str], metrics: Dict[str, MetricType], string_table: List[str]
) -> None:
    """
    Split streamed v1 attributes into v0.4 'meta' (strings) and 'metrics'
    (numbers). Mutates *meta* and *metrics* in place.
    """
    if not isinstance(attr, list):
        raise TypeError("Attribute must be a list, got type %r." % type(attr))
    if len(attr) % 3 != 0:
        raise TypeError("Attribute list must have a multiple of 3 elements, got %r." % len(attr))

    # Attributes are streamed as flat (key, type, value) triples.
    for idx in range(0, len(attr), 3):
        key = _get_and_add_string(string_table, attr[idx])
        value_type, value = attr[idx + 1], attr[idx + 2]
        if value_type == V1AnyValueKeys.STRING:
            meta[key] = _get_and_add_string(string_table, value)
        elif value_type == V1AnyValueKeys.BOOL:
            # Treat v1 boolean attributes as metrics with a value of 1 or 0
            metrics[key] = 1 if value else 0
        elif value_type == V1AnyValueKeys.DOUBLE:
            metrics[key] = value
        elif value_type == V1AnyValueKeys.INT:
            metrics[key] = value
        elif value_type == V1AnyValueKeys.BYTES:
            raise NotImplementedError("Bytes values are not supported yet.")
        elif value_type == V1AnyValueKeys.ARRAY:
            raise NotImplementedError("Array of strings values are not supported yet.")
        elif value_type == V1AnyValueKeys.KEY_VALUE_LIST:
            raise NotImplementedError("Key value list values are not supported yet.")
        else:
            raise TypeError("Unknown attribute value type %r." % value_type)
|
|
1102
|
+
|
|
1103
|
+
|
|
705
1104
|
def _verify_v07_payload(data: Any) -> v04TracePayload:
|
|
706
1105
|
if not isinstance(data, dict):
|
|
707
1106
|
raise TypeError("Trace payload must be a map, got type %r." % type(data))
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: ddapm-test-agent
|
|
3
|
-
Version: 1.
|
|
3
|
+
Version: 1.32.0
|
|
4
4
|
Summary: Test agent for Datadog APM client libraries
|
|
5
5
|
Home-page: https://github.com/Datadog/dd-apm-test-agent
|
|
6
6
|
Author: Kyle Verhoog
|
|
@@ -24,6 +24,9 @@ Requires-Dist: requests
|
|
|
24
24
|
Requires-Dist: typing_extensions
|
|
25
25
|
Requires-Dist: yarl
|
|
26
26
|
Requires-Dist: vcrpy
|
|
27
|
+
Requires-Dist: opentelemetry-proto<1.37.0,>1.33.0
|
|
28
|
+
Requires-Dist: protobuf>=3.19.0
|
|
29
|
+
Requires-Dist: grpcio<2.0,>=1.66.2
|
|
27
30
|
Provides-Extra: testing
|
|
28
31
|
Requires-Dist: ddtrace==3.11.0; extra == "testing"
|
|
29
32
|
Requires-Dist: pytest; extra == "testing"
|
|
@@ -69,13 +72,16 @@ The test agent can be installed from PyPI:
|
|
|
69
72
|
|
|
70
73
|
pip install ddapm-test-agent
|
|
71
74
|
|
|
72
|
-
|
|
75
|
+
# HTTP on port 8126, OTLP HTTP on port 4318, OTLP GRPC on port 4317
|
|
76
|
+
ddapm-test-agent --port=8126 --otlp-http-port=4318 --otlp-grpc-port=4317
|
|
73
77
|
|
|
74
78
|
or from Docker:
|
|
75
79
|
|
|
76
80
|
# Run the test agent and mount the snapshot directory
|
|
77
81
|
docker run --rm\
|
|
78
82
|
-p 8126:8126\
|
|
83
|
+
-p 4318:4318\
|
|
84
|
+
-p 4317:4317\
|
|
79
85
|
-e SNAPSHOT_CI=0\
|
|
80
86
|
-v $PWD/tests/snapshots:/snapshots\
|
|
81
87
|
ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:latest
|
|
@@ -422,6 +428,24 @@ Return stats that have been received by the agent for the given session token.
|
|
|
422
428
|
|
|
423
429
|
Stats are returned as a JSON list of the stats payloads received.
|
|
424
430
|
|
|
431
|
+
### /test/session/logs
|
|
432
|
+
|
|
433
|
+
Return OpenTelemetry logs that have been received by the agent for the given session token.
|
|
434
|
+
|
|
435
|
+
#### [optional] `?test_session_token=`
|
|
436
|
+
#### [optional] `X-Datadog-Test-Session-Token`
|
|
437
|
+
|
|
438
|
+
Logs are returned as a JSON list of the OTLP logs payloads received. The logs are in the standard OpenTelemetry Protocol (OTLP) v1.7.0 format, decoded from protobuf into JSON.
|
|
439
|
+
|
|
440
|
+
### /test/session/metrics
|
|
441
|
+
|
|
442
|
+
Return OpenTelemetry metrics that have been received by the agent for the given session token.
|
|
443
|
+
|
|
444
|
+
#### [optional] `?test_session_token=`
|
|
445
|
+
#### [optional] `X-Datadog-Test-Session-Token`
|
|
446
|
+
|
|
447
|
+
Metrics are returned as a JSON list of the OTLP metrics payloads received. The metrics are in the standard OpenTelemetry Protocol (OTLP) v1.7.0 format, decoded from protobuf into JSON.
|
|
448
|
+
|
|
425
449
|
### /test/session/responses/config (POST)
|
|
426
450
|
Create a Remote Config payload to retrieve in endpoint `/v0.7/config`
|
|
427
451
|
|
|
@@ -522,6 +546,35 @@ curl -X GET 'http://0.0.0.0:8126/test/integrations/tested_versions'
|
|
|
522
546
|
|
|
523
547
|
Mimics the pipeline_stats endpoint of the agent, but always returns OK, and logs a line every time it's called.
|
|
524
548
|
|
|
549
|
+
### /v1/logs (HTTP)
|
|
550
|
+
|
|
551
|
+
Accepts OpenTelemetry Protocol (OTLP) v1.7.0 logs in protobuf format via HTTP. This endpoint validates and decodes OTLP logs payloads for testing OpenTelemetry logs exporters and libraries.
|
|
552
|
+
|
|
553
|
+
The HTTP endpoint accepts `POST` requests with `Content-Type: application/x-protobuf` and `Content-Type: application/json` and stores the decoded logs for retrieval via the `/test/session/logs` endpoint.
|
|
554
|
+
|
|
555
|
+
### /v1/metrics (HTTP)
|
|
556
|
+
|
|
557
|
+
Accepts OpenTelemetry Protocol (OTLP) v1.7.0 metrics in protobuf format via HTTP. This endpoint validates and decodes OTLP metrics payloads for testing OpenTelemetry metrics exporters and libraries.
|
|
558
|
+
|
|
559
|
+
The HTTP endpoint accepts `POST` requests with `Content-Type: application/x-protobuf` and `Content-Type: application/json` and stores the decoded metrics for retrieval via the `/test/session/metrics` endpoint.
|
|
560
|
+
|
|
561
|
+
### OTLP Logs and Metrics via GRPC
|
|
562
|
+
|
|
563
|
+
OTLP logs and metrics can also be sent via GRPC using the OpenTelemetry `LogsService.Export` and `MetricsService.Export` methods respectively. The GRPC server implements the standard OTLP service interfaces and forwards all requests to the HTTP server, ensuring consistent processing and session management.
|
|
564
|
+
|
|
565
|
+
**Note:** OTLP endpoints are served on separate ports from the main APM endpoints (default: 8126):
|
|
566
|
+
- **HTTP**: Port 4318 (default) - Use `--otlp-http-port` to configure
|
|
567
|
+
- **GRPC**: Port 4317 (default) - Use `--otlp-grpc-port` to configure
|
|
568
|
+
|
|
569
|
+
Both protocols store decoded data for retrieval via the `/test/session/logs` and `/test/session/metrics` HTTP endpoints respectively.
|
|
570
|
+
|
|
571
|
+
GRPC Client → GRPC Server → HTTP POST → HTTP Server → Agent Storage
|
|
572
|
+
↓ ↓
|
|
573
|
+
(forwards protobuf) (session management)
|
|
574
|
+
↓ ↓
|
|
575
|
+
HTTP Retrievable via
|
|
576
|
+
Response /test/session/{logs,metrics}
|
|
577
|
+
|
|
525
578
|
### /tracer_flare/v1
|
|
526
579
|
|
|
527
580
|
Mimics the tracer_flare endpoint of the agent. Returns OK if the flare contains the required form fields, otherwise `400`.
|
|
@@ -1,24 +1,26 @@
|
|
|
1
1
|
ddapm_test_agent/__init__.py,sha256=hJBQduemued6MAWH6Uk2gqJx2vb0_dd0UO52pJ83nLM,138
|
|
2
|
-
ddapm_test_agent/agent.py,sha256=
|
|
2
|
+
ddapm_test_agent/agent.py,sha256=S5VzW5yvBQKqsGj50Xqb_u8gXvRnZguwWTZXn6BcX-M,73960
|
|
3
3
|
ddapm_test_agent/apmtelemetry.py,sha256=w_9-yUDh1dgox-FfLqeOHU2C14GcjOjen-_SVagiZrc,861
|
|
4
4
|
ddapm_test_agent/checks.py,sha256=pBa4YKZQVA8qaTVJ_XgMA6TmlUZNh99YOrCFJA7fwo0,6865
|
|
5
|
-
ddapm_test_agent/client.py,sha256=
|
|
5
|
+
ddapm_test_agent/client.py,sha256=ViEmiRX9Y3SQ-KBhSc-FdzBmIVIe8Ij9jj-Q6VGyzLY,7359
|
|
6
6
|
ddapm_test_agent/cmd.py,sha256=UL8dVGBN4j77Nyx9EJrwE9LLmoPNgru81h9f1qrZLyc,2451
|
|
7
7
|
ddapm_test_agent/context.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
8
8
|
ddapm_test_agent/fmt.py,sha256=BarFfTI8bd_2gFBrRc40lKgJiUP3T7VMp4NQUeJ9ZRY,3675
|
|
9
9
|
ddapm_test_agent/integration.py,sha256=DbrPb6UvyIDSHcmKe6ZJVaieNeUXalb15yd0smvhgAA,265
|
|
10
|
+
ddapm_test_agent/logs.py,sha256=NDM-FPL52oCL1tL75XJ0xbBLIxyZkS99iZkQ-weOQns,2901
|
|
11
|
+
ddapm_test_agent/metrics.py,sha256=EZo7lSec2oAiH7tUqavKZ2MJM7TwbuFGE3AT3cXwmSM,3988
|
|
10
12
|
ddapm_test_agent/remoteconfig.py,sha256=_QjYUKc3JF31DxdvISDXgslm5WVnYWAw0hyckWuLc1c,3606
|
|
11
|
-
ddapm_test_agent/trace.py,sha256=
|
|
13
|
+
ddapm_test_agent/trace.py,sha256=t0OR8w3NcZK-EOOoadgPITiZqS5tAJGtxqLVGLEw7Kg,45816
|
|
12
14
|
ddapm_test_agent/trace_checks.py,sha256=bRg2eLKoHROXIFJRbujMUn0T3x1X8pZso-j8wXNomec,9972
|
|
13
15
|
ddapm_test_agent/trace_snapshot.py,sha256=g2MhKi8UE-Wsf6PtuzPoXymcW-cYRUnvj63SP9FETJs,22354
|
|
14
16
|
ddapm_test_agent/tracerflare.py,sha256=uoSjhPCOKZflgJn5JLv1Unh4gUdAR1-YbC9_1n1iH9w,954
|
|
15
17
|
ddapm_test_agent/tracestats.py,sha256=q_WQZnh2kXSSN3fRIBe_0jMYCBQHcaS3fZmJTge4lWc,2073
|
|
16
18
|
ddapm_test_agent/tracestats_snapshot.py,sha256=VsB6MVnHPjPWHVWnnDdCXJcVKL_izKXEf9lvJ0qbjNQ,3609
|
|
17
19
|
ddapm_test_agent/vcr_proxy.py,sha256=g6ix7laiS8Hqq9p14nkTMARhj5KMZmyRZjZpfFEMxOM,4973
|
|
18
|
-
ddapm_test_agent-1.
|
|
19
|
-
ddapm_test_agent-1.
|
|
20
|
-
ddapm_test_agent-1.
|
|
21
|
-
ddapm_test_agent-1.
|
|
22
|
-
ddapm_test_agent-1.
|
|
23
|
-
ddapm_test_agent-1.
|
|
24
|
-
ddapm_test_agent-1.
|
|
20
|
+
ddapm_test_agent-1.32.0.dist-info/licenses/LICENSE.BSD3,sha256=J9S_Tq-hhvteDV2W8R0rqht5DZHkmvgdx3gnLZg4j6Q,1493
|
|
21
|
+
ddapm_test_agent-1.32.0.dist-info/licenses/LICENSE.apache2,sha256=5V2RruBHZQIcPyceiv51DjjvdvhgsgS4pnXAOHDuZkQ,11342
|
|
22
|
+
ddapm_test_agent-1.32.0.dist-info/METADATA,sha256=mf922XgdhlbHNJk3P5jCiw5HBkmzfwpex-FDvqjHXa4,27493
|
|
23
|
+
ddapm_test_agent-1.32.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
24
|
+
ddapm_test_agent-1.32.0.dist-info/entry_points.txt,sha256=ulayVs6YJ-0Ej2kxbwn39wOHDVXbyQgFgsbRQmXydcs,250
|
|
25
|
+
ddapm_test_agent-1.32.0.dist-info/top_level.txt,sha256=A9jiKOrrg6VjFAk-mtlSVYN4wr0VsZe58ehGR6IW47U,17
|
|
26
|
+
ddapm_test_agent-1.32.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
{ddapm_test_agent-1.31.0.dist-info → ddapm_test_agent-1.32.0.dist-info}/licenses/LICENSE.BSD3
RENAMED
|
File without changes
|
{ddapm_test_agent-1.31.0.dist-info → ddapm_test_agent-1.32.0.dist-info}/licenses/LICENSE.apache2
RENAMED
|
File without changes
|
|
File without changes
|