dt-extensions-sdk 1.1.13__py3-none-any.whl → 1.1.15__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
Files changed (31)
  1. {dt_extensions_sdk-1.1.13.dist-info → dt_extensions_sdk-1.1.15.dist-info}/METADATA +1 -1
  2. dt_extensions_sdk-1.1.15.dist-info/RECORD +33 -0
  3. {dt_extensions_sdk-1.1.13.dist-info → dt_extensions_sdk-1.1.15.dist-info}/licenses/LICENSE.txt +9 -9
  4. dynatrace_extension/__about__.py +5 -5
  5. dynatrace_extension/__init__.py +27 -27
  6. dynatrace_extension/cli/__init__.py +5 -5
  7. dynatrace_extension/cli/create/__init__.py +1 -1
  8. dynatrace_extension/cli/create/create.py +76 -76
  9. dynatrace_extension/cli/create/extension_template/.gitignore.template +160 -160
  10. dynatrace_extension/cli/create/extension_template/README.md.template +33 -33
  11. dynatrace_extension/cli/create/extension_template/activation.json.template +15 -15
  12. dynatrace_extension/cli/create/extension_template/extension/activationSchema.json.template +118 -118
  13. dynatrace_extension/cli/create/extension_template/extension/extension.yaml.template +17 -17
  14. dynatrace_extension/cli/create/extension_template/extension_name/__main__.py.template +43 -43
  15. dynatrace_extension/cli/create/extension_template/setup.py.template +28 -12
  16. dynatrace_extension/cli/main.py +428 -428
  17. dynatrace_extension/cli/schema.py +129 -129
  18. dynatrace_extension/sdk/__init__.py +3 -3
  19. dynatrace_extension/sdk/activation.py +43 -43
  20. dynatrace_extension/sdk/callback.py +134 -141
  21. dynatrace_extension/sdk/communication.py +518 -469
  22. dynatrace_extension/sdk/event.py +19 -19
  23. dynatrace_extension/sdk/extension.py +1040 -1037
  24. dynatrace_extension/sdk/helper.py +191 -191
  25. dynatrace_extension/sdk/metric.py +118 -118
  26. dynatrace_extension/sdk/runtime.py +67 -67
  27. dynatrace_extension/sdk/vendor/mureq/LICENSE +13 -13
  28. dynatrace_extension/sdk/vendor/mureq/mureq.py +447 -447
  29. dt_extensions_sdk-1.1.13.dist-info/RECORD +0 -33
  30. {dt_extensions_sdk-1.1.13.dist-info → dt_extensions_sdk-1.1.15.dist-info}/WHEEL +0 -0
  31. {dt_extensions_sdk-1.1.13.dist-info → dt_extensions_sdk-1.1.15.dist-info}/entry_points.txt +0 -0
dynatrace_extension/sdk/communication.py
@@ -1,469 +1,518 @@
1
- # SPDX-FileCopyrightText: 2023-present Dynatrace LLC
2
- #
3
- # SPDX-License-Identifier: MIT
4
-
5
- from __future__ import annotations
6
-
7
- import json
8
- import logging
9
- import random
10
- import sys
11
- import time
12
- from abc import ABC, abstractmethod
13
- from dataclasses import dataclass
14
- from enum import Enum
15
- from itertools import islice
16
- from pathlib import Path
17
- from typing import Any, Iterable, List, TypeVar
18
-
19
- from .vendor.mureq.mureq import HTTPException, Response, request
20
-
21
- CONTENT_TYPE_JSON = "application/json;charset=utf-8"
22
- CONTENT_TYPE_PLAIN = "text/plain;charset=utf-8"
23
- COUNT_METRIC_ITEMS_DICT = TypeVar("COUNT_METRIC_ITEMS_DICT", str, List[str])
24
- MAX_MINT_LINES_PER_REQUEST = 1000
25
- HTTP_BAD_REQUEST = 400
26
-
27
-
28
- class StatusValue(Enum):
29
- EMPTY = ""
30
- OK = "OK"
31
- GENERIC_ERROR = "GENERIC_ERROR"
32
- INVALID_ARGS_ERROR = "INVALID_ARGS_ERROR"
33
- EEC_CONNECTION_ERROR = "EEC_CONNECTION_ERROR"
34
- INVALID_CONFIG_ERROR = "INVALID_CONFIG_ERROR"
35
- AUTHENTICATION_ERROR = "AUTHENTICATION_ERROR"
36
- DEVICE_CONNECTION_ERROR = "DEVICE_CONNECTION_ERROR"
37
- UNKNOWN_ERROR = "UNKNOWN_ERROR"
38
-
39
-
40
- class Status:
41
- def __init__(self, status: StatusValue = StatusValue.EMPTY, message: str = "", timestamp: int | None = None):
42
- self.status = status
43
- self.message = message
44
- self.timestamp = timestamp
45
-
46
- def to_json(self) -> dict:
47
- status = {"status": self.status.value, "message": self.message}
48
- if self.timestamp:
49
- status["timestamp"] = self.timestamp # type: ignore
50
- return status
51
-
52
- def __repr__(self):
53
- return json.dumps(self.to_json())
54
-
55
- def is_error(self) -> bool:
56
- return self.status not in (StatusValue.OK, StatusValue.EMPTY)
57
-
58
-
59
- class CommunicationClient(ABC):
60
- """
61
- Abstract class for extension communication
62
- """
63
-
64
- @abstractmethod
65
- def get_activation_config(self) -> dict:
66
- pass
67
-
68
- @abstractmethod
69
- def get_extension_config(self) -> str:
70
- pass
71
-
72
- @abstractmethod
73
- def get_feature_sets(self) -> dict[str, list[str]]:
74
- pass
75
-
76
- @abstractmethod
77
- def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
78
- pass
79
-
80
- @abstractmethod
81
- def send_count_delta_signal(self, metric_keys: set[str]) -> None:
82
- pass
83
-
84
- @abstractmethod
85
- def send_status(self, status: Status) -> dict:
86
- pass
87
-
88
- @abstractmethod
89
- def send_keep_alive(self) -> str:
90
- pass
91
-
92
- @abstractmethod
93
- def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
94
- pass
95
-
96
- @abstractmethod
97
- def send_events(self, event: dict | list[dict], eec_enrichment: bool) -> dict | None:
98
- pass
99
-
100
- @abstractmethod
101
- def send_sfm_metrics(self, metrics: list[str]) -> MintResponse:
102
- pass
103
-
104
- @abstractmethod
105
- def get_cluster_time_diff(self) -> int:
106
- pass
107
-
108
- @abstractmethod
109
- def send_dt_event(self, event: dict) -> None:
110
- pass
111
-
112
-
113
- class HttpClient(CommunicationClient):
114
- """
115
- Concrete implementation of the client, this one handles the communication with the EEC
116
- """
117
-
118
- def __init__(self, base_url: str, datasource_id: str, id_token_file_path: str, logger: logging.Logger):
119
- self._activation_config_url = f"{base_url}/userconfig/{datasource_id}"
120
- self._extension_config_url = f"{base_url}/extconfig/{datasource_id}"
121
- self._metric_url = f"{base_url}/mint/{datasource_id}"
122
- self._sfm_url = f"{base_url}/sfm/{datasource_id}"
123
- self._keep_alive_url = f"{base_url}/alive/{datasource_id}"
124
- self._timediff_url = f"{base_url}/timediffms"
125
- self._events_url = f"{base_url}/logs/{datasource_id}"
126
- self._count_metric_register_url = f"{base_url}/countmetricregister/{datasource_id}"
127
- self._count_delta_signal_url = f"{base_url}/countmetricdeltasignal/{datasource_id}"
128
- self._feature_sets_query = "?feature_sets_json"
129
- self._event_ingest_url = f"{base_url}/events/{datasource_id}"
130
-
131
- with open(id_token_file_path) as f:
132
- id_token = f.read()
133
- self._headers = {"Authorization": f"Api-Token {id_token}"}
134
-
135
- self.logger = logger
136
-
137
- def _make_request(
138
- self,
139
- url: str,
140
- method: str = "GET",
141
- body: Any = None,
142
- extra_headers: dict | None = None,
143
- is_delta_signal: bool = False,
144
- ) -> Response:
145
- if extra_headers is None:
146
- extra_headers = {}
147
- headers = {**self._headers, **extra_headers}
148
-
149
- response = request(method, url, body=body, headers=headers)
150
- self.logger.debug(f"Response from {url}: {response}")
151
- if response.status_code >= HTTP_BAD_REQUEST:
152
- if not is_delta_signal:
153
- self.logger.warning(f"Error HTTP {response.status_code} from {url}: {response.content}")
154
- return response
155
-
156
- def get_activation_config(self) -> dict:
157
- try:
158
- response = self._make_request(self._activation_config_url, "GET")
159
- except HTTPException as err:
160
- self.logger.error(f"HTTP exception: {err}")
161
- return {}
162
-
163
- if response.status_code < HTTP_BAD_REQUEST:
164
- try:
165
- return response.json()
166
- except Exception as err:
167
- self.logger.error(f"JSON parse failure: {err}")
168
- return {}
169
- else:
170
- self.logger.error(f"Can't get activation configuration ({response.content}). Extension is stopped.")
171
- sys.exit(1)
172
-
173
- def get_extension_config(self) -> str:
174
- try:
175
- response = self._make_request(self._extension_config_url, "GET")
176
- return response.content.decode("utf-8")
177
- except HTTPException as err:
178
- self.logger.error(f"HTTP exception: {err}")
179
- return ""
180
-
181
- def get_feature_sets(self) -> dict[str, list[str]]:
182
- try:
183
- response = self._make_request(self._extension_config_url + self._feature_sets_query, "GET")
184
- except HTTPException as err:
185
- self.logger.error(f"HTTP exception: {err}")
186
- return {}
187
-
188
- if response.status_code < HTTP_BAD_REQUEST:
189
- try:
190
- return response.json()
191
- except Exception as err:
192
- self.logger.error(f"JSON parse failure: {err}")
193
- return {}
194
-
195
- return {}
196
-
197
- def register_count_metrics(self, json_pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
198
- register_data = json.dumps(json_pattern).encode("utf-8")
199
- try:
200
- response = self._make_request(
201
- self._count_metric_register_url,
202
- "POST",
203
- register_data,
204
- extra_headers={"Content-Type": CONTENT_TYPE_JSON},
205
- )
206
- if response.ok:
207
- self.logger.debug(
208
- f"Monotonic cache converter successful registration for metric {list(json_pattern.keys())}."
209
- )
210
- except HTTPException:
211
- self.logger.error(
212
- f"Monotonic cache converter registration request error for metric {list(json_pattern.keys())}."
213
- )
214
-
215
- def send_count_delta_signal(self, metric_keys: set[str]) -> None:
216
- json_data = {"metric_keys": list(metric_keys), "filter_dimensions": {}}
217
- delta_signal_data = json.dumps(json_data).encode("utf-8")
218
- try:
219
- response = self._make_request(
220
- self._count_delta_signal_url,
221
- "POST",
222
- delta_signal_data,
223
- extra_headers={"Content-Type": CONTENT_TYPE_JSON},
224
- is_delta_signal=True,
225
- )
226
- if response.ok:
227
- self.logger.debug(
228
- f"Monotonic converter cache delta calculation signal success for metric {metric_keys}."
229
- )
230
- else:
231
- self.logger.debug(
232
- f"Not enough metrics of type {metric_keys} cached in monotonic cache converter to calculate delta."
233
- )
234
- except HTTPException:
235
- self.logger.error(
236
- f"Monotonic cache converter delta calculation signal request error for metric {metric_keys}."
237
- )
238
-
239
- def send_dt_event(self, event: dict[str, str | int | dict[str, str]]):
240
- json_data = json.dumps(event).encode("utf-8")
241
- try:
242
- response = self._make_request(
243
- self._event_ingest_url, "POST", json_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
244
- )
245
- if response.ok:
246
- self.logger.debug(f"DT Event sent to EEC, content: {json_data.decode('utf-8')}")
247
- else:
248
- self.logger.debug(f"DT Event request failed: {response.content}")
249
- except HTTPException:
250
- self.logger.error(f"DT Event request HTTP exception, request body: {json_data.decode('utf-8')}")
251
-
252
- def send_status(self, status: Status) -> dict:
253
- encoded_data = json.dumps(status.to_json()).encode("utf-8")
254
- self.logger.debug(f"Sending status to EEC: {status}")
255
- response = self._make_request(
256
- self._keep_alive_url, "POST", encoded_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
257
- ).content
258
- return json.loads(response.decode("utf-8"))
259
-
260
- def send_keep_alive(self):
261
- return self.send_status(Status())
262
-
263
- def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
264
- total_lines = len(mint_lines)
265
- lines_sent = 0
266
-
267
- self.logger.debug(f"Start sending {total_lines} metrics to the EEC")
268
- responses = []
269
-
270
- # We divide into chunks of MAX_MINT_LINES_PER_REQUEST lines to avoid hitting the body size limit
271
- chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
272
-
273
- for chunk in chunks:
274
- lines_in_chunk = len(chunk)
275
- lines_sent += lines_in_chunk
276
- self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")
277
- mint_data = "\n".join(chunk).encode("utf-8")
278
- response = self._make_request(
279
- self._metric_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
280
- ).json()
281
- self.logger.debug(f"{self._metric_url}: {response}")
282
- mint_response = MintResponse.from_json(response)
283
- responses.append(mint_response)
284
- return responses
285
-
286
- def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> dict | None:
287
- self.logger.debug(f"Sending log events: {events}")
288
- event_data = json.dumps(events).encode("utf-8")
289
- try:
290
- # EEC returns empty body on success
291
- return self._make_request(
292
- self._events_url,
293
- "POST",
294
- event_data,
295
- extra_headers={"Content-Type": CONTENT_TYPE_JSON, "eec-enrichment": str(eec_enrichment).lower()},
296
- ).json()
297
- except json.JSONDecodeError:
298
- return None
299
-
300
- def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
301
- mint_data = "\n".join(mint_lines).encode("utf-8")
302
- return MintResponse.from_json(
303
- self._make_request(
304
- self._sfm_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
305
- ).json()
306
- )
307
-
308
- def get_cluster_time_diff(self) -> int:
309
- response = self._make_request(self._timediff_url, "GET")
310
- time_diff = response.json()["clusterDiffMs"]
311
- return time_diff
312
-
313
-
314
- class DebugClient(CommunicationClient):
315
- """
316
- This client is used for debugging purposes
317
- It does not send metrics to Dynatrace, but prints them to the console
318
- """
319
-
320
- def __init__(
321
- self,
322
- activation_config_path: str,
323
- extension_config_path: str,
324
- logger: logging.Logger,
325
- local_ingest: bool = False,
326
- local_ingest_port: int = 14499,
327
- print_metrics: bool = True
328
- ):
329
- self.activation_config = {}
330
- if activation_config_path and Path(activation_config_path).exists():
331
- with open(activation_config_path) as f:
332
- self.activation_config = json.load(f)
333
-
334
- self.extension_config = ""
335
- if not extension_config_path:
336
- extension_config_path = "extension/extension.yaml"
337
- if Path(extension_config_path).exists():
338
- with open(extension_config_path) as f:
339
- self.extension_config = f.read()
340
- self.logger = logger
341
- self.local_ingest = local_ingest
342
- self.local_ingest_port = local_ingest_port
343
- self.print_metrics = print_metrics
344
-
345
- def get_activation_config(self) -> dict:
346
- return self.activation_config
347
-
348
- def get_extension_config(self) -> str:
349
- return self.extension_config
350
-
351
- def get_feature_sets(self) -> dict[str, list[str]]:
352
- # This is only called from dt-sdk run, where PyYaml is installed because of dt-cli
353
- # Do NOT move this to the top of the file
354
- import yaml # type: ignore
355
-
356
- # Grab the feature sets from the extension.yaml file
357
- extension_yaml = yaml.safe_load(self.extension_config)
358
- if not extension_yaml:
359
- return {}
360
-
361
- yaml_feature_sets = extension_yaml.get("python", {}).get("featureSets", [])
362
- if not yaml_feature_sets:
363
- return {}
364
-
365
- # Construct the object that the SDK expects
366
- feature_sets = {}
367
- for feature_set in yaml_feature_sets:
368
- feature_set_name = feature_set["featureSet"]
369
- if feature_set_name in self.activation_config.get("featureSets", []):
370
- feature_sets[feature_set_name] = [metric["key"] for metric in feature_set["metrics"]]
371
-
372
- return feature_sets
373
-
374
- def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
375
- self.logger.info(f"Registering metrics in converter: {pattern}")
376
-
377
- def send_count_delta_signal(self, metric_keys: set[str]) -> None:
378
- self.logger.info(f"Sending delta signal for: {metric_keys}")
379
-
380
- def send_dt_event(self, event: dict) -> None:
381
- self.logger.info(f"Sending DT Event: {event}")
382
-
383
- def send_status(self, status: Status) -> dict:
384
- self.logger.info(f"send_status: '{status}'")
385
- return {}
386
-
387
- def send_keep_alive(self):
388
- return self.send_status(Status())
389
-
390
- def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
391
- total_lines = len(mint_lines)
392
- lines_sent = 0
393
-
394
- self.logger.info(f"Start sending {total_lines} metrics to the EEC")
395
-
396
- responses = []
397
-
398
- chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
399
- for chunk in chunks:
400
- lines_in_chunk = len(chunk)
401
- lines_sent += lines_in_chunk
402
- self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")
403
-
404
- if self.local_ingest:
405
- mint_data = "\n".join(chunk).encode("utf-8")
406
- response = request(
407
- "POST",
408
- f"http://localhost:{self.local_ingest_port}/metrics/ingest",
409
- body=mint_data,
410
- headers={"Content-Type": CONTENT_TYPE_PLAIN},
411
- ).json()
412
- mint_response = MintResponse.from_json(response)
413
- responses.append(mint_response)
414
- else:
415
- if self.print_metrics:
416
- for line in mint_lines:
417
- self.logger.info(f"send_metric: {line}")
418
-
419
- response = MintResponse(lines_invalid=0, lines_ok=len(chunk), error=None, warnings=None)
420
- responses.append(response)
421
- return responses
422
-
423
- def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> dict | None:
424
- self.logger.info(f"send_events (enrichment = {eec_enrichment}): {events}")
425
- return None
426
-
427
- def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
428
- for line in mint_lines:
429
- self.logger.info(f"send_sfm_metric: {line}")
430
- return MintResponse(lines_invalid=0, lines_ok=len(mint_lines), error=None, warnings=None)
431
-
432
- def get_cluster_time_diff(self) -> int:
433
- return 0
434
-
435
-
436
- def divide_into_chunks(iterable: Iterable, chunk_size: int) -> Iterable:
437
- """
438
- Yield successive n-sized chunks from iterable.
439
- Example: _chunk([1, 2, 3, 4, 5, 6, 7, 8, 9], 3) -> [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
440
-
441
- :param iterable: The iterable to chunk
442
- :param chunk_size: The size of the chunks
443
- """
444
- iterator = iter(iterable)
445
- while True:
446
- subset = list(islice(iterator, chunk_size))
447
- if not subset:
448
- return
449
- yield subset
450
-
451
-
452
- @dataclass
453
- class MintResponse:
454
- lines_ok: int
455
- lines_invalid: int
456
- error: dict | None
457
- warnings: dict | None
458
-
459
- @staticmethod
460
- def from_json(json_data: dict) -> MintResponse:
461
- return MintResponse(
462
- lines_ok=json_data.get("linesOk", 0),
463
- lines_invalid=json_data.get("linesInvalid", 0),
464
- error=json_data.get("error"),
465
- warnings=json_data.get("warnings"),
466
- )
467
-
468
- def __str__(self) -> str:
469
- return f"MintResponse(lines_ok={self.lines_ok}, lines_invalid={self.lines_invalid}, error={self.error}, warnings={self.warnings})"
1
+ # SPDX-FileCopyrightText: 2023-present Dynatrace LLC
2
+ #
3
+ # SPDX-License-Identifier: MIT
4
+
5
+ from __future__ import annotations
6
+
7
+ import json
8
+ import logging
9
+ import random
10
+ import sys
11
+ import time
12
+ from abc import ABC, abstractmethod
13
+ from collections import deque
14
+ from dataclasses import dataclass
15
+ from enum import Enum
16
+ from itertools import islice
17
+ from pathlib import Path
18
+ from typing import Any, Iterable, List, TypeVar, Union
19
+
20
+ from .vendor.mureq.mureq import HTTPException, Response, request
21
+
22
+ CONTENT_TYPE_JSON = "application/json;charset=utf-8"
23
+ CONTENT_TYPE_PLAIN = "text/plain;charset=utf-8"
24
+ COUNT_METRIC_ITEMS_DICT = TypeVar("COUNT_METRIC_ITEMS_DICT", str, List[str])
25
+ MAX_MINT_LINES_PER_REQUEST = 1000
26
+ MAX_LOG_EVENTS_PER_REQUEST = 50_000
27
+ MAX_LOG_REQUEST_SIZE = 5_000_000
28
+ HTTP_BAD_REQUEST = 400
29
+
30
+
31
+ class StatusValue(Enum):
32
+ EMPTY = ""
33
+ OK = "OK"
34
+ GENERIC_ERROR = "GENERIC_ERROR"
35
+ INVALID_ARGS_ERROR = "INVALID_ARGS_ERROR"
36
+ EEC_CONNECTION_ERROR = "EEC_CONNECTION_ERROR"
37
+ INVALID_CONFIG_ERROR = "INVALID_CONFIG_ERROR"
38
+ AUTHENTICATION_ERROR = "AUTHENTICATION_ERROR"
39
+ DEVICE_CONNECTION_ERROR = "DEVICE_CONNECTION_ERROR"
40
+ UNKNOWN_ERROR = "UNKNOWN_ERROR"
41
+
42
+
43
+ class Status:
44
+ def __init__(self, status: StatusValue = StatusValue.EMPTY, message: str = "", timestamp: int | None = None):
45
+ self.status = status
46
+ self.message = message
47
+ self.timestamp = timestamp
48
+
49
+ def to_json(self) -> dict:
50
+ status = {"status": self.status.value, "message": self.message}
51
+ if self.timestamp:
52
+ status["timestamp"] = self.timestamp # type: ignore
53
+ return status
54
+
55
+ def __repr__(self):
56
+ return json.dumps(self.to_json())
57
+
58
+ def is_error(self) -> bool:
59
+ return self.status not in (StatusValue.OK, StatusValue.EMPTY)
60
+
61
+
62
+ class CommunicationClient(ABC):
63
+ """
64
+ Abstract class for extension communication
65
+ """
66
+
67
+ @abstractmethod
68
+ def get_activation_config(self) -> dict:
69
+ pass
70
+
71
+ @abstractmethod
72
+ def get_extension_config(self) -> str:
73
+ pass
74
+
75
+ @abstractmethod
76
+ def get_feature_sets(self) -> dict[str, list[str]]:
77
+ pass
78
+
79
+ @abstractmethod
80
+ def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
81
+ pass
82
+
83
+ @abstractmethod
84
+ def send_count_delta_signal(self, metric_keys: set[str]) -> None:
85
+ pass
86
+
87
+ @abstractmethod
88
+ def send_status(self, status: Status) -> dict:
89
+ pass
90
+
91
+ @abstractmethod
92
+ def send_keep_alive(self) -> str:
93
+ pass
94
+
95
+ @abstractmethod
96
+ def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
97
+ pass
98
+
99
+ @abstractmethod
100
+ def send_events(self, event: dict | list[dict], eec_enrichment: bool) -> List[Union[dict | None]]:
101
+ pass
102
+
103
+ @abstractmethod
104
+ def send_sfm_metrics(self, metrics: list[str]) -> MintResponse:
105
+ pass
106
+
107
+ @abstractmethod
108
+ def get_cluster_time_diff(self) -> int:
109
+ pass
110
+
111
+ @abstractmethod
112
+ def send_dt_event(self, event: dict) -> None:
113
+ pass
114
+
115
+
116
+ class HttpClient(CommunicationClient):
117
+ """
118
+ Concrete implementation of the client, this one handles the communication with the EEC
119
+ """
120
+
121
+ def __init__(self, base_url: str, datasource_id: str, id_token_file_path: str, logger: logging.Logger):
122
+ self._activation_config_url = f"{base_url}/userconfig/{datasource_id}"
123
+ self._extension_config_url = f"{base_url}/extconfig/{datasource_id}"
124
+ self._metric_url = f"{base_url}/mint/{datasource_id}"
125
+ self._sfm_url = f"{base_url}/sfm/{datasource_id}"
126
+ self._keep_alive_url = f"{base_url}/alive/{datasource_id}"
127
+ self._timediff_url = f"{base_url}/timediffms"
128
+ self._events_url = f"{base_url}/logs/{datasource_id}"
129
+ self._count_metric_register_url = f"{base_url}/countmetricregister/{datasource_id}"
130
+ self._count_delta_signal_url = f"{base_url}/countmetricdeltasignal/{datasource_id}"
131
+ self._feature_sets_query = "?feature_sets_json"
132
+ self._event_ingest_url = f"{base_url}/events/{datasource_id}"
133
+
134
+ with open(id_token_file_path) as f:
135
+ id_token = f.read()
136
+ self._headers = {"Authorization": f"Api-Token {id_token}"}
137
+
138
+ self.logger = logger
139
+
140
+ def _make_request(
141
+ self,
142
+ url: str,
143
+ method: str = "GET",
144
+ body: Any = None,
145
+ extra_headers: dict | None = None,
146
+ is_delta_signal: bool = False,
147
+ ) -> Response:
148
+ if extra_headers is None:
149
+ extra_headers = {}
150
+ headers = {**self._headers, **extra_headers}
151
+
152
+ response = request(method, url, body=body, headers=headers)
153
+ self.logger.debug(f"Response from {url}: {response}")
154
+ if response.status_code >= HTTP_BAD_REQUEST:
155
+ if not is_delta_signal:
156
+ self.logger.warning(f"Error HTTP {response.status_code} from {url}: {response.content}")
157
+ return response
158
+
159
+ def get_activation_config(self) -> dict:
160
+ try:
161
+ response = self._make_request(self._activation_config_url, "GET")
162
+ except HTTPException as err:
163
+ self.logger.error(f"HTTP exception: {err}")
164
+ return {}
165
+
166
+ if response.status_code < HTTP_BAD_REQUEST:
167
+ try:
168
+ return response.json()
169
+ except Exception as err:
170
+ self.logger.error(f"JSON parse failure: {err}")
171
+ return {}
172
+ else:
173
+ self.logger.error(f"Can't get activation configuration ({response.content}). Extension is stopped.")
174
+ sys.exit(1)
175
+
176
+ def get_extension_config(self) -> str:
177
+ try:
178
+ response = self._make_request(self._extension_config_url, "GET")
179
+ return response.content.decode("utf-8")
180
+ except HTTPException as err:
181
+ self.logger.error(f"HTTP exception: {err}")
182
+ return ""
183
+
184
+ def get_feature_sets(self) -> dict[str, list[str]]:
185
+ try:
186
+ response = self._make_request(self._extension_config_url + self._feature_sets_query, "GET")
187
+ except HTTPException as err:
188
+ self.logger.error(f"HTTP exception: {err}")
189
+ return {}
190
+
191
+ if response.status_code < HTTP_BAD_REQUEST:
192
+ try:
193
+ return response.json()
194
+ except Exception as err:
195
+ self.logger.error(f"JSON parse failure: {err}")
196
+ return {}
197
+
198
+ return {}
199
+
200
+ def register_count_metrics(self, json_pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
201
+ register_data = json.dumps(json_pattern).encode("utf-8")
202
+ try:
203
+ response = self._make_request(
204
+ self._count_metric_register_url,
205
+ "POST",
206
+ register_data,
207
+ extra_headers={"Content-Type": CONTENT_TYPE_JSON},
208
+ )
209
+ if response.ok:
210
+ self.logger.debug(
211
+ f"Monotonic cache converter successful registration for metric {list(json_pattern.keys())}."
212
+ )
213
+ except HTTPException:
214
+ self.logger.error(
215
+ f"Monotonic cache converter registration request error for metric {list(json_pattern.keys())}."
216
+ )
217
+
218
+ def send_count_delta_signal(self, metric_keys: set[str]) -> None:
219
+ json_data = {"metric_keys": list(metric_keys), "filter_dimensions": {}}
220
+ delta_signal_data = json.dumps(json_data).encode("utf-8")
221
+ try:
222
+ response = self._make_request(
223
+ self._count_delta_signal_url,
224
+ "POST",
225
+ delta_signal_data,
226
+ extra_headers={"Content-Type": CONTENT_TYPE_JSON},
227
+ is_delta_signal=True,
228
+ )
229
+ if response.ok:
230
+ self.logger.debug(
231
+ f"Monotonic converter cache delta calculation signal success for metric {metric_keys}."
232
+ )
233
+ else:
234
+ self.logger.debug(
235
+ f"Not enough metrics of type {metric_keys} cached in monotonic cache converter to calculate delta."
236
+ )
237
+ except HTTPException:
238
+ self.logger.error(
239
+ f"Monotonic cache converter delta calculation signal request error for metric {metric_keys}."
240
+ )
241
+
242
+ def send_dt_event(self, event: dict[str, str | int | dict[str, str]]):
243
+ json_data = json.dumps(event).encode("utf-8")
244
+ try:
245
+ response = self._make_request(
246
+ self._event_ingest_url, "POST", json_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
247
+ )
248
+ if response.ok:
249
+ self.logger.debug(f"DT Event sent to EEC, content: {json_data.decode('utf-8')}")
250
+ else:
251
+ self.logger.debug(f"DT Event request failed: {response.content}")
252
+ except HTTPException:
253
+ self.logger.error(f"DT Event request HTTP exception, request body: {json_data.decode('utf-8')}")
254
+
255
+ def send_status(self, status: Status) -> dict:
256
+ encoded_data = json.dumps(status.to_json()).encode("utf-8")
257
+ self.logger.debug(f"Sending status to EEC: {status}")
258
+ response = self._make_request(
259
+ self._keep_alive_url, "POST", encoded_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
260
+ ).content
261
+ return json.loads(response.decode("utf-8"))
262
+
263
+ def send_keep_alive(self):
264
+ return self.send_status(Status())
265
+
266
+ def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
267
+ total_lines = len(mint_lines)
268
+ lines_sent = 0
269
+
270
+ self.logger.debug(f"Start sending {total_lines} metrics to the EEC")
271
+ responses = []
272
+
273
+ # We divide into chunks of MAX_MINT_LINES_PER_REQUEST lines to avoid hitting the body size limit
274
+ chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
275
+
276
+ for chunk in chunks:
277
+ lines_in_chunk = len(chunk)
278
+ lines_sent += lines_in_chunk
279
+ self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")
280
+ mint_data = "\n".join(chunk).encode("utf-8")
281
+ response = self._make_request(
282
+ self._metric_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
283
+ ).json()
284
+ self.logger.debug(f"{self._metric_url}: {response}")
285
+ mint_response = MintResponse.from_json(response)
286
+ responses.append(mint_response)
287
+ return responses
288
+
289
+ def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> List[Union[dict, None]]:
290
+ self.logger.debug(f"Sending log events: {events}")
291
+
292
+ responses = []
293
+ batches = divide_logs_into_batches([events] if type(events) == dict else events)
294
+
295
+ for batch in batches:
296
+ try:
297
+ # EEC returns empty body on success
298
+ responses.append(self._make_request(
299
+ self._events_url,
300
+ "POST",
301
+ batch,
302
+ extra_headers={"Content-Type": CONTENT_TYPE_JSON, "eec-enrichment": str(eec_enrichment).lower()},
303
+ ).json())
304
+ except json.JSONDecodeError:
305
+ responses.append(None)
306
+
307
+ return responses
308
+
309
+ def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
310
+ mint_data = "\n".join(mint_lines).encode("utf-8")
311
+ return MintResponse.from_json(
312
+ self._make_request(
313
+ self._sfm_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
314
+ ).json()
315
+ )
316
+
317
+ def get_cluster_time_diff(self) -> int:
318
+ response = self._make_request(self._timediff_url, "GET")
319
+ time_diff = response.json()["clusterDiffMs"]
320
+ return time_diff
321
+
322
+
323
+ class DebugClient(CommunicationClient):
324
+ """
325
+ This client is used for debugging purposes
326
+ It does not send metrics to Dynatrace, but prints them to the console
327
+ """
328
+
329
+ def __init__(
330
+ self,
331
+ activation_config_path: str,
332
+ extension_config_path: str,
333
+ logger: logging.Logger,
334
+ local_ingest: bool = False,
335
+ local_ingest_port: int = 14499,
336
+ print_metrics: bool = True
337
+ ):
338
+ self.activation_config = {}
339
+ if activation_config_path and Path(activation_config_path).exists():
340
+ with open(activation_config_path) as f:
341
+ self.activation_config = json.load(f)
342
+
343
+ self.extension_config = ""
344
+ if not extension_config_path:
345
+ extension_config_path = "extension/extension.yaml"
346
+ if Path(extension_config_path).exists():
347
+ with open(extension_config_path) as f:
348
+ self.extension_config = f.read()
349
+ self.logger = logger
350
+ self.local_ingest = local_ingest
351
+ self.local_ingest_port = local_ingest_port
352
+ self.print_metrics = print_metrics
353
+
354
+ def get_activation_config(self) -> dict:
355
+ return self.activation_config
356
+
357
+ def get_extension_config(self) -> str:
358
+ return self.extension_config
359
+
360
+ def get_feature_sets(self) -> dict[str, list[str]]:
361
+ # This is only called from dt-sdk run, where PyYaml is installed because of dt-cli
362
+ # Do NOT move this to the top of the file
363
+ import yaml # type: ignore
364
+
365
+ # Grab the feature sets from the extension.yaml file
366
+ extension_yaml = yaml.safe_load(self.extension_config)
367
+ if not extension_yaml:
368
+ return {}
369
+
370
+ yaml_feature_sets = extension_yaml.get("python", {}).get("featureSets", [])
371
+ if not yaml_feature_sets:
372
+ return {}
373
+
374
+ # Construct the object that the SDK expects
375
+ feature_sets = {}
376
+ for feature_set in yaml_feature_sets:
377
+ feature_set_name = feature_set["featureSet"]
378
+ if feature_set_name in self.activation_config.get("featureSets", []):
379
+ feature_sets[feature_set_name] = [metric["key"] for metric in feature_set["metrics"]]
380
+
381
+ return feature_sets
382
+
383
+ def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
384
+ self.logger.info(f"Registering metrics in converter: {pattern}")
385
+
386
+ def send_count_delta_signal(self, metric_keys: set[str]) -> None:
387
+ self.logger.info(f"Sending delta signal for: {metric_keys}")
388
+
389
+ def send_dt_event(self, event: dict) -> None:
390
+ self.logger.info(f"Sending DT Event: {event}")
391
+
392
+ def send_status(self, status: Status) -> dict:
393
+ self.logger.info(f"send_status: '{status}'")
394
+ return {}
395
+
396
+ def send_keep_alive(self):
397
+ return self.send_status(Status())
398
+
399
+ def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
400
+ total_lines = len(mint_lines)
401
+ lines_sent = 0
402
+
403
+ self.logger.info(f"Start sending {total_lines} metrics to the EEC")
404
+
405
+ responses = []
406
+
407
+ chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
408
+ for chunk in chunks:
409
+ lines_in_chunk = len(chunk)
410
+ lines_sent += lines_in_chunk
411
+ self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")
412
+
413
+ if self.local_ingest:
414
+ mint_data = "\n".join(chunk).encode("utf-8")
415
+ response = request(
416
+ "POST",
417
+ f"http://localhost:{self.local_ingest_port}/metrics/ingest",
418
+ body=mint_data,
419
+ headers={"Content-Type": CONTENT_TYPE_PLAIN},
420
+ ).json()
421
+ mint_response = MintResponse.from_json(response)
422
+ responses.append(mint_response)
423
+ else:
424
+ if self.print_metrics:
425
+ for line in mint_lines:
426
+ self.logger.info(f"send_metric: {line}")
427
+
428
+ response = MintResponse(lines_invalid=0, lines_ok=len(chunk), error=None, warnings=None)
429
+ responses.append(response)
430
+ return responses
431
+
432
+ def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> dict | None:
433
+ self.logger.info(f"send_events (enrichment = {eec_enrichment}): {events}")
434
+ return None
435
+
436
+ def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
437
+ for line in mint_lines:
438
+ self.logger.info(f"send_sfm_metric: {line}")
439
+ return MintResponse(lines_invalid=0, lines_ok=len(mint_lines), error=None, warnings=None)
440
+
441
+ def get_cluster_time_diff(self) -> int:
442
+ return 0
443
+
444
+
445
+ def divide_into_chunks(iterable: Iterable, chunk_size: int) -> Iterable:
446
+ """
447
+ Yield successive n-sized chunks from iterable.
448
+ Example: _chunk([1, 2, 3, 4, 5, 6, 7, 8, 9], 3) -> [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
449
+
450
+ :param iterable: The iterable to chunk
451
+ :param chunk_size: The size of the chunks
452
+ """
453
+ iterator = iter(iterable)
454
+ while True:
455
+ subset = list(islice(iterator, chunk_size))
456
+ if not subset:
457
+ return
458
+ yield subset
459
+
460
+ def divide_logs_into_batches(logs: List[dict]):
461
+ """
462
+ Yield successive batches from a list of log events, according to sizing limitations
463
+ imposed by the EEC: 5 MB payload, 50,000 events
464
+
465
+ :param logs: The list of log events
466
+ """
467
+ events_left = len(logs)
468
+ events = deque(logs)
469
+
470
+ batch = []
471
+ batch_size = 0
472
+ batch_items = 0
473
+
474
+ while events_left > 0:
475
+ if batch_items == MAX_LOG_EVENTS_PER_REQUEST:
476
+ yield batch
477
+ batch = []
478
+ batch_size = 0
479
+ batch_items = 0
480
+ continue
481
+
482
+ event = events.popleft()
483
+ events_left -= 1
484
+
485
+ if event is not None:
486
+ event = json.dumps(event).encode("utf-8")
487
+ event_size = len(event)
488
+
489
+ if batch_size + event_size >= MAX_LOG_REQUEST_SIZE:
490
+ yield batch
491
+ batch = [event]
492
+ batch_size = event_size
493
+ batch_items = 1
494
+ else:
495
+ batch.append(event)
496
+ batch_size += event_size
497
+ batch_items += 1
498
+ else:
499
+ yield batch
500
+
501
+ @dataclass
502
+ class MintResponse:
503
+ lines_ok: int
504
+ lines_invalid: int
505
+ error: dict | None
506
+ warnings: dict | None
507
+
508
+ @staticmethod
509
+ def from_json(json_data: dict) -> MintResponse:
510
+ return MintResponse(
511
+ lines_ok=json_data.get("linesOk", 0),
512
+ lines_invalid=json_data.get("linesInvalid", 0),
513
+ error=json_data.get("error"),
514
+ warnings=json_data.get("warnings"),
515
+ )
516
+
517
+ def __str__(self) -> str:
518
+ return f"MintResponse(lines_ok={self.lines_ok}, lines_invalid={self.lines_invalid}, error={self.error}, warnings={self.warnings})"