dt-extensions-sdk 1.1.17__py3-none-any.whl → 1.1.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31) hide show
  1. {dt_extensions_sdk-1.1.17.dist-info → dt_extensions_sdk-1.1.19.dist-info}/METADATA +2 -2
  2. dt_extensions_sdk-1.1.19.dist-info/RECORD +33 -0
  3. {dt_extensions_sdk-1.1.17.dist-info → dt_extensions_sdk-1.1.19.dist-info}/WHEEL +1 -1
  4. {dt_extensions_sdk-1.1.17.dist-info → dt_extensions_sdk-1.1.19.dist-info}/licenses/LICENSE.txt +9 -9
  5. dynatrace_extension/__about__.py +5 -5
  6. dynatrace_extension/__init__.py +27 -27
  7. dynatrace_extension/cli/__init__.py +5 -5
  8. dynatrace_extension/cli/create/__init__.py +1 -1
  9. dynatrace_extension/cli/create/create.py +76 -76
  10. dynatrace_extension/cli/create/extension_template/.gitignore.template +160 -160
  11. dynatrace_extension/cli/create/extension_template/README.md.template +33 -33
  12. dynatrace_extension/cli/create/extension_template/activation.json.template +15 -15
  13. dynatrace_extension/cli/create/extension_template/extension/activationSchema.json.template +118 -118
  14. dynatrace_extension/cli/create/extension_template/extension/extension.yaml.template +17 -17
  15. dynatrace_extension/cli/create/extension_template/extension_name/__main__.py.template +43 -43
  16. dynatrace_extension/cli/create/extension_template/setup.py.template +28 -28
  17. dynatrace_extension/cli/main.py +428 -428
  18. dynatrace_extension/cli/schema.py +129 -129
  19. dynatrace_extension/sdk/__init__.py +3 -3
  20. dynatrace_extension/sdk/activation.py +43 -43
  21. dynatrace_extension/sdk/callback.py +134 -134
  22. dynatrace_extension/sdk/communication.py +521 -519
  23. dynatrace_extension/sdk/event.py +19 -19
  24. dynatrace_extension/sdk/extension.py +1045 -1045
  25. dynatrace_extension/sdk/helper.py +191 -191
  26. dynatrace_extension/sdk/metric.py +118 -118
  27. dynatrace_extension/sdk/runtime.py +67 -67
  28. dynatrace_extension/sdk/vendor/mureq/LICENSE +13 -13
  29. dynatrace_extension/sdk/vendor/mureq/mureq.py +447 -447
  30. dt_extensions_sdk-1.1.17.dist-info/RECORD +0 -33
  31. {dt_extensions_sdk-1.1.17.dist-info → dt_extensions_sdk-1.1.19.dist-info}/entry_points.txt +0 -0
@@ -1,519 +1,521 @@
1
- # SPDX-FileCopyrightText: 2023-present Dynatrace LLC
2
- #
3
- # SPDX-License-Identifier: MIT
4
-
5
- from __future__ import annotations
6
-
7
- import json
8
- import logging
9
- import random
10
- import sys
11
- import time
12
- from abc import ABC, abstractmethod
13
- from collections import deque
14
- from dataclasses import dataclass
15
- from enum import Enum
16
- from itertools import islice
17
- from pathlib import Path
18
- from typing import Any, Dict, Iterable, List, TypeVar, Union
19
-
20
- from .vendor.mureq.mureq import HTTPException, Response, request
21
-
22
- CONTENT_TYPE_JSON = "application/json;charset=utf-8"
23
- CONTENT_TYPE_PLAIN = "text/plain;charset=utf-8"
24
- COUNT_METRIC_ITEMS_DICT = TypeVar("COUNT_METRIC_ITEMS_DICT", str, List[str])
25
- MAX_MINT_LINES_PER_REQUEST = 1000
26
- MAX_LOG_EVENTS_PER_REQUEST = 50_000
27
- MAX_LOG_REQUEST_SIZE = 5_000_000
28
- HTTP_BAD_REQUEST = 400
29
-
30
-
31
- class StatusValue(Enum):
32
- EMPTY = ""
33
- OK = "OK"
34
- GENERIC_ERROR = "GENERIC_ERROR"
35
- INVALID_ARGS_ERROR = "INVALID_ARGS_ERROR"
36
- EEC_CONNECTION_ERROR = "EEC_CONNECTION_ERROR"
37
- INVALID_CONFIG_ERROR = "INVALID_CONFIG_ERROR"
38
- AUTHENTICATION_ERROR = "AUTHENTICATION_ERROR"
39
- DEVICE_CONNECTION_ERROR = "DEVICE_CONNECTION_ERROR"
40
- UNKNOWN_ERROR = "UNKNOWN_ERROR"
41
-
42
-
43
- class Status:
44
- def __init__(self, status: StatusValue = StatusValue.EMPTY, message: str = "", timestamp: int | None = None):
45
- self.status = status
46
- self.message = message
47
- self.timestamp = timestamp
48
-
49
- def to_json(self) -> dict:
50
- status = {"status": self.status.value, "message": self.message}
51
- if self.timestamp:
52
- status["timestamp"] = self.timestamp # type: ignore
53
- return status
54
-
55
- def __repr__(self):
56
- return json.dumps(self.to_json())
57
-
58
- def is_error(self) -> bool:
59
- return self.status not in (StatusValue.OK, StatusValue.EMPTY)
60
-
61
-
62
- class CommunicationClient(ABC):
63
- """
64
- Abstract class for extension communication
65
- """
66
-
67
- @abstractmethod
68
- def get_activation_config(self) -> dict:
69
- pass
70
-
71
- @abstractmethod
72
- def get_extension_config(self) -> str:
73
- pass
74
-
75
- @abstractmethod
76
- def get_feature_sets(self) -> dict[str, list[str]]:
77
- pass
78
-
79
- @abstractmethod
80
- def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
81
- pass
82
-
83
- @abstractmethod
84
- def send_count_delta_signal(self, metric_keys: set[str]) -> None:
85
- pass
86
-
87
- @abstractmethod
88
- def send_status(self, status: Status) -> dict:
89
- pass
90
-
91
- @abstractmethod
92
- def send_keep_alive(self) -> str:
93
- pass
94
-
95
- @abstractmethod
96
- def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
97
- pass
98
-
99
- @abstractmethod
100
- def send_events(self, event: dict | list[dict], eec_enrichment: bool) -> list[Union[dict | None]]:
101
- pass
102
-
103
- @abstractmethod
104
- def send_sfm_metrics(self, metrics: list[str]) -> MintResponse:
105
- pass
106
-
107
- @abstractmethod
108
- def get_cluster_time_diff(self) -> int:
109
- pass
110
-
111
- @abstractmethod
112
- def send_dt_event(self, event: dict) -> None:
113
- pass
114
-
115
-
116
- class HttpClient(CommunicationClient):
117
- """
118
- Concrete implementation of the client, this one handles the communication with the EEC
119
- """
120
-
121
- def __init__(self, base_url: str, datasource_id: str, id_token_file_path: str, logger: logging.Logger):
122
- self._activation_config_url = f"{base_url}/userconfig/{datasource_id}"
123
- self._extension_config_url = f"{base_url}/extconfig/{datasource_id}"
124
- self._metric_url = f"{base_url}/mint/{datasource_id}"
125
- self._sfm_url = f"{base_url}/sfm/{datasource_id}"
126
- self._keep_alive_url = f"{base_url}/alive/{datasource_id}"
127
- self._timediff_url = f"{base_url}/timediffms"
128
- self._events_url = f"{base_url}/logs/{datasource_id}"
129
- self._count_metric_register_url = f"{base_url}/countmetricregister/{datasource_id}"
130
- self._count_delta_signal_url = f"{base_url}/countmetricdeltasignal/{datasource_id}"
131
- self._feature_sets_query = "?feature_sets_json"
132
- self._event_ingest_url = f"{base_url}/events/{datasource_id}"
133
-
134
- with open(id_token_file_path) as f:
135
- id_token = f.read()
136
- self._headers = {"Authorization": f"Api-Token {id_token}"}
137
-
138
- self.logger = logger
139
-
140
- def _make_request(
141
- self,
142
- url: str,
143
- method: str = "GET",
144
- body: Any = None,
145
- extra_headers: dict | None = None,
146
- is_delta_signal: bool = False,
147
- ) -> Response:
148
- if extra_headers is None:
149
- extra_headers = {}
150
- headers = {**self._headers, **extra_headers}
151
-
152
- response = request(method, url, body=body, headers=headers)
153
- self.logger.debug(f"Response from {url}: {response}")
154
- if response.status_code >= HTTP_BAD_REQUEST:
155
- if not is_delta_signal:
156
- self.logger.warning(f"Error HTTP {response.status_code} from {url}: {response.content}")
157
- return response
158
-
159
- def get_activation_config(self) -> dict:
160
- try:
161
- response = self._make_request(self._activation_config_url, "GET")
162
- except HTTPException as err:
163
- self.logger.error(f"HTTP exception: {err}")
164
- return {}
165
-
166
- if response.status_code < HTTP_BAD_REQUEST:
167
- try:
168
- return response.json()
169
- except Exception as err:
170
- self.logger.error(f"JSON parse failure: {err}")
171
- return {}
172
- else:
173
- self.logger.error(f"Can't get activation configuration ({response.content}). Extension is stopped.")
174
- sys.exit(1)
175
-
176
- def get_extension_config(self) -> str:
177
- try:
178
- response = self._make_request(self._extension_config_url, "GET")
179
- return response.content.decode("utf-8")
180
- except HTTPException as err:
181
- self.logger.error(f"HTTP exception: {err}")
182
- return ""
183
-
184
- def get_feature_sets(self) -> dict[str, list[str]]:
185
- try:
186
- response = self._make_request(self._extension_config_url + self._feature_sets_query, "GET")
187
- except HTTPException as err:
188
- self.logger.error(f"HTTP exception: {err}")
189
- return {}
190
-
191
- if response.status_code < HTTP_BAD_REQUEST:
192
- try:
193
- return response.json()
194
- except Exception as err:
195
- self.logger.error(f"JSON parse failure: {err}")
196
- return {}
197
-
198
- return {}
199
-
200
- def register_count_metrics(self, json_pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
201
- register_data = json.dumps(json_pattern).encode("utf-8")
202
- try:
203
- response = self._make_request(
204
- self._count_metric_register_url,
205
- "POST",
206
- register_data,
207
- extra_headers={"Content-Type": CONTENT_TYPE_JSON},
208
- )
209
- if response.ok:
210
- self.logger.debug(
211
- f"Monotonic cache converter successful registration for metric {list(json_pattern.keys())}."
212
- )
213
- except HTTPException:
214
- self.logger.error(
215
- f"Monotonic cache converter registration request error for metric {list(json_pattern.keys())}."
216
- )
217
-
218
- def send_count_delta_signal(self, metric_keys: set[str]) -> None:
219
- json_data = {"metric_keys": list(metric_keys), "filter_dimensions": {}}
220
- delta_signal_data = json.dumps(json_data).encode("utf-8")
221
- try:
222
- response = self._make_request(
223
- self._count_delta_signal_url,
224
- "POST",
225
- delta_signal_data,
226
- extra_headers={"Content-Type": CONTENT_TYPE_JSON},
227
- is_delta_signal=True,
228
- )
229
- if response.ok:
230
- self.logger.debug(
231
- f"Monotonic converter cache delta calculation signal success for metric {metric_keys}."
232
- )
233
- else:
234
- self.logger.debug(
235
- f"Not enough metrics of type {metric_keys} cached in monotonic cache converter to calculate delta."
236
- )
237
- except HTTPException:
238
- self.logger.error(
239
- f"Monotonic cache converter delta calculation signal request error for metric {metric_keys}."
240
- )
241
-
242
- def send_dt_event(self, event: dict[str, str | int | dict[str, str]]):
243
- json_data = json.dumps(event).encode("utf-8")
244
- try:
245
- response = self._make_request(
246
- self._event_ingest_url, "POST", json_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
247
- )
248
- if response.ok:
249
- self.logger.debug(f"DT Event sent to EEC, content: {json_data.decode('utf-8')}")
250
- else:
251
- self.logger.debug(f"DT Event request failed: {response.content}")
252
- except HTTPException:
253
- self.logger.error(f"DT Event request HTTP exception, request body: {json_data.decode('utf-8')}")
254
-
255
- def send_status(self, status: Status) -> dict:
256
- encoded_data = json.dumps(status.to_json()).encode("utf-8")
257
- self.logger.debug(f"Sending status to EEC: {status}")
258
- response = self._make_request(
259
- self._keep_alive_url, "POST", encoded_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
260
- ).content
261
- return json.loads(response.decode("utf-8"))
262
-
263
- def send_keep_alive(self):
264
- return self.send_status(Status())
265
-
266
- def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
267
- total_lines = len(mint_lines)
268
- lines_sent = 0
269
-
270
- self.logger.debug(f"Start sending {total_lines} metrics to the EEC")
271
- responses = []
272
-
273
- # We divide into chunks of MAX_MINT_LINES_PER_REQUEST lines to avoid hitting the body size limit
274
- chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
275
-
276
- for chunk in chunks:
277
- lines_in_chunk = len(chunk)
278
- lines_sent += lines_in_chunk
279
- self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")
280
- mint_data = "\n".join(chunk).encode("utf-8")
281
- response = self._make_request(
282
- self._metric_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
283
- ).json()
284
- self.logger.debug(f"{self._metric_url}: {response}")
285
- mint_response = MintResponse.from_json(response)
286
- responses.append(mint_response)
287
- return responses
288
-
289
- def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> list[dict | None]:
290
- self.logger.debug(f"Sending log events: {events}")
291
-
292
- responses = []
293
- batches = divide_logs_into_batches([events] if isinstance(events, dict) else events)
294
-
295
- for batch in batches:
296
- try:
297
- encoded_batch = json.dumps(batch).encode("utf-8")
298
- eec_response = self._make_request(
299
- self._events_url,
300
- "POST",
301
- encoded_batch,
302
- extra_headers={"Content-Type": CONTENT_TYPE_JSON, "eec-enrichment": str(eec_enrichment).lower()},
303
- ).json()
304
- responses.append(eec_response)
305
- except json.JSONDecodeError:
306
- responses.append(None)
307
-
308
- return responses
309
-
310
- def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
311
- mint_data = "\n".join(mint_lines).encode("utf-8")
312
- return MintResponse.from_json(
313
- self._make_request(
314
- self._sfm_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
315
- ).json()
316
- )
317
-
318
- def get_cluster_time_diff(self) -> int:
319
- response = self._make_request(self._timediff_url, "GET")
320
- time_diff = response.json()["clusterDiffMs"]
321
- return time_diff
322
-
323
-
324
- class DebugClient(CommunicationClient):
325
- """
326
- This client is used for debugging purposes
327
- It does not send metrics to Dynatrace, but prints them to the console
328
- """
329
-
330
- def __init__(
331
- self,
332
- activation_config_path: str,
333
- extension_config_path: str,
334
- logger: logging.Logger,
335
- local_ingest: bool = False,
336
- local_ingest_port: int = 14499,
337
- print_metrics: bool = True
338
- ):
339
- self.activation_config = {}
340
- if activation_config_path and Path(activation_config_path).exists():
341
- with open(activation_config_path) as f:
342
- self.activation_config = json.load(f)
343
-
344
- self.extension_config = ""
345
- if not extension_config_path:
346
- extension_config_path = "extension/extension.yaml"
347
- if Path(extension_config_path).exists():
348
- with open(extension_config_path) as f:
349
- self.extension_config = f.read()
350
- self.logger = logger
351
- self.local_ingest = local_ingest
352
- self.local_ingest_port = local_ingest_port
353
- self.print_metrics = print_metrics
354
-
355
- def get_activation_config(self) -> dict:
356
- return self.activation_config
357
-
358
- def get_extension_config(self) -> str:
359
- return self.extension_config
360
-
361
- def get_feature_sets(self) -> dict[str, list[str]]:
362
- # This is only called from dt-sdk run, where PyYaml is installed because of dt-cli
363
- # Do NOT move this to the top of the file
364
- import yaml # type: ignore
365
-
366
- # Grab the feature sets from the extension.yaml file
367
- extension_yaml = yaml.safe_load(self.extension_config)
368
- if not extension_yaml:
369
- return {}
370
-
371
- yaml_feature_sets = extension_yaml.get("python", {}).get("featureSets", [])
372
- if not yaml_feature_sets:
373
- return {}
374
-
375
- # Construct the object that the SDK expects
376
- feature_sets = {}
377
- for feature_set in yaml_feature_sets:
378
- feature_set_name = feature_set["featureSet"]
379
- if feature_set_name in self.activation_config.get("featureSets", []):
380
- feature_sets[feature_set_name] = [metric["key"] for metric in feature_set["metrics"]]
381
-
382
- return feature_sets
383
-
384
- def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
385
- self.logger.info(f"Registering metrics in converter: {pattern}")
386
-
387
- def send_count_delta_signal(self, metric_keys: set[str]) -> None:
388
- self.logger.info(f"Sending delta signal for: {metric_keys}")
389
-
390
- def send_dt_event(self, event: dict) -> None:
391
- self.logger.info(f"Sending DT Event: {event}")
392
-
393
- def send_status(self, status: Status) -> dict:
394
- self.logger.info(f"send_status: '{status}'")
395
- return {}
396
-
397
- def send_keep_alive(self):
398
- return self.send_status(Status())
399
-
400
- def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
401
- total_lines = len(mint_lines)
402
- lines_sent = 0
403
-
404
- self.logger.info(f"Start sending {total_lines} metrics to the EEC")
405
-
406
- responses = []
407
-
408
- chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
409
- for chunk in chunks:
410
- lines_in_chunk = len(chunk)
411
- lines_sent += lines_in_chunk
412
- self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")
413
-
414
- if self.local_ingest:
415
- mint_data = "\n".join(chunk).encode("utf-8")
416
- response = request(
417
- "POST",
418
- f"http://localhost:{self.local_ingest_port}/metrics/ingest",
419
- body=mint_data,
420
- headers={"Content-Type": CONTENT_TYPE_PLAIN},
421
- ).json()
422
- mint_response = MintResponse.from_json(response)
423
- responses.append(mint_response)
424
- else:
425
- if self.print_metrics:
426
- for line in mint_lines:
427
- self.logger.info(f"send_metric: {line}")
428
-
429
- response = MintResponse(lines_invalid=0, lines_ok=len(chunk), error=None, warnings=None)
430
- responses.append(response)
431
- return responses
432
-
433
- def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> dict | None:
434
- self.logger.info(f"send_events (enrichment = {eec_enrichment}): {events}")
435
- return None
436
-
437
- def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
438
- for line in mint_lines:
439
- self.logger.info(f"send_sfm_metric: {line}")
440
- return MintResponse(lines_invalid=0, lines_ok=len(mint_lines), error=None, warnings=None)
441
-
442
- def get_cluster_time_diff(self) -> int:
443
- return 0
444
-
445
-
446
- def divide_into_chunks(iterable: Iterable, chunk_size: int) -> Iterable:
447
- """
448
- Yield successive n-sized chunks from iterable.
449
- Example: _chunk([1, 2, 3, 4, 5, 6, 7, 8, 9], 3) -> [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
450
-
451
- :param iterable: The iterable to chunk
452
- :param chunk_size: The size of the chunks
453
- """
454
- iterator = iter(iterable)
455
- while True:
456
- subset = list(islice(iterator, chunk_size))
457
- if not subset:
458
- return
459
- yield subset
460
-
461
- def divide_logs_into_batches(logs: list[dict]):
462
- """
463
- Yield successive batches from a list of log events, according to sizing limitations
464
- imposed by the EEC: 5 MB payload, 50,000 events
465
-
466
- :param logs: The list of log events
467
- """
468
- events_left = len(logs)
469
- events = deque(logs)
470
-
471
- batch = []
472
- batch_size = 0
473
- batch_items = 0
474
-
475
- while events_left > 0:
476
- if batch_items == MAX_LOG_EVENTS_PER_REQUEST:
477
- yield batch
478
- batch = []
479
- batch_size = 0
480
- batch_items = 0
481
- continue
482
-
483
- event = events.popleft()
484
- events_left -= 1
485
-
486
- if event is not None:
487
- event = json.dumps(event).encode("utf-8")
488
- event_size = len(event)
489
-
490
- if batch_size + event_size >= MAX_LOG_REQUEST_SIZE:
491
- yield batch
492
- batch = [event]
493
- batch_size = event_size
494
- batch_items = 1
495
- else:
496
- batch.append(event)
497
- batch_size += event_size
498
- batch_items += 1
499
- else:
500
- yield batch
501
-
502
- @dataclass
503
- class MintResponse:
504
- lines_ok: int
505
- lines_invalid: int
506
- error: dict | None
507
- warnings: dict | None
508
-
509
- @staticmethod
510
- def from_json(json_data: dict) -> MintResponse:
511
- return MintResponse(
512
- lines_ok=json_data.get("linesOk", 0),
513
- lines_invalid=json_data.get("linesInvalid", 0),
514
- error=json_data.get("error"),
515
- warnings=json_data.get("warnings"),
516
- )
517
-
518
- def __str__(self) -> str:
519
- return f"MintResponse(lines_ok={self.lines_ok}, lines_invalid={self.lines_invalid}, error={self.error}, warnings={self.warnings})"
1
+ # SPDX-FileCopyrightText: 2023-present Dynatrace LLC
2
+ #
3
+ # SPDX-License-Identifier: MIT
4
+
5
+ from __future__ import annotations
6
+
7
+ import json
8
+ import logging
9
+ import random
10
+ import sys
11
+ import time
12
+ from abc import ABC, abstractmethod
13
+ from collections import deque
14
+ from dataclasses import dataclass
15
+ from enum import Enum
16
+ from itertools import islice
17
+ from pathlib import Path
18
+ from typing import Any, Dict, Iterable, List, TypeVar, Union
19
+
20
+ from .vendor.mureq.mureq import HTTPException, Response, request
21
+
22
+ CONTENT_TYPE_JSON = "application/json;charset=utf-8"
23
+ CONTENT_TYPE_PLAIN = "text/plain;charset=utf-8"
24
+ COUNT_METRIC_ITEMS_DICT = TypeVar("COUNT_METRIC_ITEMS_DICT", str, List[str])
25
+ MAX_MINT_LINES_PER_REQUEST = 1000
26
+ MAX_LOG_EVENTS_PER_REQUEST = 50_000
27
+ MAX_LOG_REQUEST_SIZE = 5_000_000
28
+ HTTP_BAD_REQUEST = 400
29
+
30
+
31
+ class StatusValue(Enum):
32
+ EMPTY = ""
33
+ OK = "OK"
34
+ GENERIC_ERROR = "GENERIC_ERROR"
35
+ INVALID_ARGS_ERROR = "INVALID_ARGS_ERROR"
36
+ EEC_CONNECTION_ERROR = "EEC_CONNECTION_ERROR"
37
+ INVALID_CONFIG_ERROR = "INVALID_CONFIG_ERROR"
38
+ AUTHENTICATION_ERROR = "AUTHENTICATION_ERROR"
39
+ DEVICE_CONNECTION_ERROR = "DEVICE_CONNECTION_ERROR"
40
+ UNKNOWN_ERROR = "UNKNOWN_ERROR"
41
+
42
+
43
+ class Status:
44
+ def __init__(self, status: StatusValue = StatusValue.EMPTY, message: str = "", timestamp: int | None = None):
45
+ self.status = status
46
+ self.message = message
47
+ self.timestamp = timestamp
48
+
49
+ def to_json(self) -> dict:
50
+ status = {"status": self.status.value, "message": self.message}
51
+ if self.timestamp:
52
+ status["timestamp"] = self.timestamp # type: ignore
53
+ return status
54
+
55
+ def __repr__(self):
56
+ return json.dumps(self.to_json())
57
+
58
+ def is_error(self) -> bool:
59
+ return self.status not in (StatusValue.OK, StatusValue.EMPTY)
60
+
61
+
62
+ class CommunicationClient(ABC):
63
+ """
64
+ Abstract class for extension communication
65
+ """
66
+
67
+ @abstractmethod
68
+ def get_activation_config(self) -> dict:
69
+ pass
70
+
71
+ @abstractmethod
72
+ def get_extension_config(self) -> str:
73
+ pass
74
+
75
+ @abstractmethod
76
+ def get_feature_sets(self) -> dict[str, list[str]]:
77
+ pass
78
+
79
+ @abstractmethod
80
+ def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
81
+ pass
82
+
83
+ @abstractmethod
84
+ def send_count_delta_signal(self, metric_keys: set[str]) -> None:
85
+ pass
86
+
87
+ @abstractmethod
88
+ def send_status(self, status: Status) -> dict:
89
+ pass
90
+
91
+ @abstractmethod
92
+ def send_keep_alive(self) -> str:
93
+ pass
94
+
95
+ @abstractmethod
96
+ def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
97
+ pass
98
+
99
+ @abstractmethod
100
+ def send_events(self, event: dict | list[dict], eec_enrichment: bool) -> list[Union[dict | None]]:
101
+ pass
102
+
103
+ @abstractmethod
104
+ def send_sfm_metrics(self, metrics: list[str]) -> MintResponse:
105
+ pass
106
+
107
+ @abstractmethod
108
+ def get_cluster_time_diff(self) -> int:
109
+ pass
110
+
111
+ @abstractmethod
112
+ def send_dt_event(self, event: dict) -> None:
113
+ pass
114
+
115
+
116
+ class HttpClient(CommunicationClient):
117
+ """
118
+ Concrete implementation of the client, this one handles the communication with the EEC
119
+ """
120
+
121
+ def __init__(self, base_url: str, datasource_id: str, id_token_file_path: str, logger: logging.Logger):
122
+ self._activation_config_url = f"{base_url}/userconfig/{datasource_id}"
123
+ self._extension_config_url = f"{base_url}/extconfig/{datasource_id}"
124
+ self._metric_url = f"{base_url}/mint/{datasource_id}"
125
+ self._sfm_url = f"{base_url}/sfm/{datasource_id}"
126
+ self._keep_alive_url = f"{base_url}/alive/{datasource_id}"
127
+ self._timediff_url = f"{base_url}/timediffms"
128
+ self._events_url = f"{base_url}/logs/{datasource_id}"
129
+ self._count_metric_register_url = f"{base_url}/countmetricregister/{datasource_id}"
130
+ self._count_delta_signal_url = f"{base_url}/countmetricdeltasignal/{datasource_id}"
131
+ self._feature_sets_query = "?feature_sets_json"
132
+ self._event_ingest_url = f"{base_url}/events/{datasource_id}"
133
+
134
+ with open(id_token_file_path) as f:
135
+ id_token = f.read()
136
+ self._headers = {"Authorization": f"Api-Token {id_token}"}
137
+
138
+ self.logger = logger
139
+
140
+ def _make_request(
141
+ self,
142
+ url: str,
143
+ method: str = "GET",
144
+ body: Any = None,
145
+ extra_headers: dict | None = None,
146
+ is_delta_signal: bool = False,
147
+ ) -> Response:
148
+ if extra_headers is None:
149
+ extra_headers = {}
150
+ headers = {**self._headers, **extra_headers}
151
+
152
+ response = request(method, url, body=body, headers=headers)
153
+ self.logger.debug(f"Response from {url}: {response}")
154
+ if response.status_code >= HTTP_BAD_REQUEST:
155
+ if not is_delta_signal:
156
+ self.logger.warning(f"Error HTTP {response.status_code} from {url}: {response.content}")
157
+ return response
158
+
159
+ def get_activation_config(self) -> dict:
160
+ try:
161
+ response = self._make_request(self._activation_config_url, "GET")
162
+ except HTTPException as err:
163
+ self.logger.error(f"HTTP exception: {err}")
164
+ return {}
165
+
166
+ if response.status_code < HTTP_BAD_REQUEST:
167
+ try:
168
+ return response.json()
169
+ except Exception as err:
170
+ self.logger.error(f"JSON parse failure: {err}")
171
+ return {}
172
+ else:
173
+ self.logger.error(f"Can't get activation configuration ({response.content}). Extension is stopped.")
174
+ sys.exit(1)
175
+
176
+ def get_extension_config(self) -> str:
177
+ try:
178
+ response = self._make_request(self._extension_config_url, "GET")
179
+ return response.content.decode("utf-8")
180
+ except HTTPException as err:
181
+ self.logger.error(f"HTTP exception: {err}")
182
+ return ""
183
+
184
+ def get_feature_sets(self) -> dict[str, list[str]]:
185
+ try:
186
+ response = self._make_request(self._extension_config_url + self._feature_sets_query, "GET")
187
+ except HTTPException as err:
188
+ self.logger.error(f"HTTP exception: {err}")
189
+ return {}
190
+
191
+ if response.status_code < HTTP_BAD_REQUEST:
192
+ try:
193
+ return response.json()
194
+ except Exception as err:
195
+ self.logger.error(f"JSON parse failure: {err}")
196
+ return {}
197
+
198
+ return {}
199
+
200
+ def register_count_metrics(self, json_pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
201
+ register_data = json.dumps(json_pattern).encode("utf-8")
202
+ try:
203
+ response = self._make_request(
204
+ self._count_metric_register_url,
205
+ "POST",
206
+ register_data,
207
+ extra_headers={"Content-Type": CONTENT_TYPE_JSON},
208
+ )
209
+ if response.ok:
210
+ self.logger.debug(
211
+ f"Monotonic cache converter successful registration for metric {list(json_pattern.keys())}."
212
+ )
213
+ except HTTPException:
214
+ self.logger.error(
215
+ f"Monotonic cache converter registration request error for metric {list(json_pattern.keys())}."
216
+ )
217
+
218
+ def send_count_delta_signal(self, metric_keys: set[str]) -> None:
219
+ json_data = {"metric_keys": list(metric_keys), "filter_dimensions": {}}
220
+ delta_signal_data = json.dumps(json_data).encode("utf-8")
221
+ try:
222
+ response = self._make_request(
223
+ self._count_delta_signal_url,
224
+ "POST",
225
+ delta_signal_data,
226
+ extra_headers={"Content-Type": CONTENT_TYPE_JSON},
227
+ is_delta_signal=True,
228
+ )
229
+ if response.ok:
230
+ self.logger.debug(
231
+ f"Monotonic converter cache delta calculation signal success for metric {metric_keys}."
232
+ )
233
+ else:
234
+ self.logger.debug(
235
+ f"Not enough metrics of type {metric_keys} cached in monotonic cache converter to calculate delta."
236
+ )
237
+ except HTTPException:
238
+ self.logger.error(
239
+ f"Monotonic cache converter delta calculation signal request error for metric {metric_keys}."
240
+ )
241
+
242
+ def send_dt_event(self, event: dict[str, str | int | dict[str, str]]):
243
+ json_data = json.dumps(event).encode("utf-8")
244
+ try:
245
+ response = self._make_request(
246
+ self._event_ingest_url, "POST", json_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
247
+ )
248
+ if response.ok:
249
+ self.logger.debug(f"DT Event sent to EEC, content: {json_data.decode('utf-8')}")
250
+ else:
251
+ self.logger.debug(f"DT Event request failed: {response.content}")
252
+ except HTTPException:
253
+ self.logger.error(f"DT Event request HTTP exception, request body: {json_data.decode('utf-8')}")
254
+
255
+ def send_status(self, status: Status) -> dict:
256
+ encoded_data = json.dumps(status.to_json()).encode("utf-8")
257
+ self.logger.debug(f"Sending status to EEC: {status}")
258
+ response = self._make_request(
259
+ self._keep_alive_url, "POST", encoded_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
260
+ ).content
261
+ return json.loads(response.decode("utf-8"))
262
+
263
+ def send_keep_alive(self):
264
+ return self.send_status(Status())
265
+
266
+ def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
267
+ total_lines = len(mint_lines)
268
+ lines_sent = 0
269
+
270
+ self.logger.debug(f"Start sending {total_lines} metrics to the EEC")
271
+ responses = []
272
+
273
+ # We divide into chunks of MAX_MINT_LINES_PER_REQUEST lines to avoid hitting the body size limit
274
+ chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
275
+
276
+ for chunk in chunks:
277
+ lines_in_chunk = len(chunk)
278
+ lines_sent += lines_in_chunk
279
+ self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")
280
+ mint_data = "\n".join(chunk).encode("utf-8")
281
+ response = self._make_request(
282
+ self._metric_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
283
+ ).json()
284
+ self.logger.debug(f"{self._metric_url}: {response}")
285
+ mint_response = MintResponse.from_json(response)
286
+ responses.append(mint_response)
287
+ return responses
288
+
289
+ def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> list[dict | None]:
290
+ self.logger.debug(f"Sending log events: {events}")
291
+
292
+ responses = []
293
+ batches = divide_logs_into_batches([events] if isinstance(events, dict) else events)
294
+
295
+ for batch in batches:
296
+ try:
297
+ encoded_batch = json.dumps(batch).encode("utf-8")
298
+ eec_response = self._make_request(
299
+ self._events_url,
300
+ "POST",
301
+ encoded_batch,
302
+ extra_headers={"Content-Type": CONTENT_TYPE_JSON, "eec-enrichment": str(eec_enrichment).lower()},
303
+ ).json()
304
+ responses.append(eec_response)
305
+ except json.JSONDecodeError:
306
+ responses.append(None)
307
+
308
+ return responses
309
+
310
+ def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
311
+ mint_data = "\n".join(mint_lines).encode("utf-8")
312
+ return MintResponse.from_json(
313
+ self._make_request(
314
+ self._sfm_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
315
+ ).json()
316
+ )
317
+
318
+ def get_cluster_time_diff(self) -> int:
319
+ response = self._make_request(self._timediff_url, "GET")
320
+ time_diff = response.json()["clusterDiffMs"]
321
+ return time_diff
322
+
323
+
324
class DebugClient(CommunicationClient):
    """
    This client is used for debugging purposes
    It does not send metrics to Dynatrace, but prints them to the console
    """

    def __init__(
        self,
        activation_config_path: str,
        extension_config_path: str,
        logger: logging.Logger,
        local_ingest: bool = False,
        local_ingest_port: int = 14499,
        print_metrics: bool = True,
    ):
        """
        :param activation_config_path: path to a JSON activation config; missing file yields an empty config
        :param extension_config_path: path to extension.yaml; defaults to extension/extension.yaml
        :param logger: logger used for all debug output
        :param local_ingest: when True, metrics are POSTed to a local ingest endpoint instead of logged
        :param local_ingest_port: port of the local ingest endpoint
        :param print_metrics: when True, individual metric/event lines are logged
        """
        self.activation_config = {}
        if activation_config_path and Path(activation_config_path).exists():
            with open(activation_config_path) as f:
                self.activation_config = json.load(f)

        # Extension config is kept as the raw YAML text; parsed lazily in get_feature_sets.
        self.extension_config = ""
        if not extension_config_path:
            extension_config_path = "extension/extension.yaml"
        if Path(extension_config_path).exists():
            with open(extension_config_path) as f:
                self.extension_config = f.read()
        self.logger = logger
        self.local_ingest = local_ingest
        self.local_ingest_port = local_ingest_port
        self.print_metrics = print_metrics

    def get_activation_config(self) -> dict:
        return self.activation_config

    def get_extension_config(self) -> str:
        return self.extension_config

    def get_feature_sets(self) -> dict[str, list[str]]:
        """Map each activated feature set name to the metric keys it declares."""
        # This is only called from dt-sdk run, where PyYaml is installed because of dt-cli
        # Do NOT move this to the top of the file
        import yaml  # type: ignore

        # Grab the feature sets from the extension.yaml file
        extension_yaml = yaml.safe_load(self.extension_config)
        if not extension_yaml:
            return {}

        yaml_feature_sets = extension_yaml.get("python", {}).get("featureSets", [])
        if not yaml_feature_sets:
            return {}

        # Construct the object that the SDK expects; only feature sets present
        # in the activation config are included.
        feature_sets = {}
        for feature_set in yaml_feature_sets:
            feature_set_name = feature_set["featureSet"]
            if feature_set_name in self.activation_config.get("featureSets", []):
                feature_sets[feature_set_name] = [metric["key"] for metric in feature_set["metrics"]]

        return feature_sets

    def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
        self.logger.info(f"Registering metrics in converter: {pattern}")

    def send_count_delta_signal(self, metric_keys: set[str]) -> None:
        self.logger.info(f"Sending delta signal for: {metric_keys}")

    def send_dt_event(self, event: dict) -> None:
        self.logger.info(f"Sending DT Event: {event}")

    def send_status(self, status: Status) -> dict:
        self.logger.info(f"send_status: '{status}'")
        return {}

    def send_keep_alive(self):
        return self.send_status(Status())

    def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
        """Log MINT lines (or POST them to a local ingest endpoint) instead of the EEC.

        :param mint_lines: metric lines in MINT format
        :return: one MintResponse per chunk
        """
        total_lines = len(mint_lines)
        lines_sent = 0

        self.logger.info(f"Start sending {total_lines} metrics to the EEC")

        responses = []

        chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
        for chunk in chunks:
            lines_in_chunk = len(chunk)
            lines_sent += lines_in_chunk
            self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")

            if self.local_ingest:
                mint_data = "\n".join(chunk).encode("utf-8")
                response = request(
                    "POST",
                    f"http://localhost:{self.local_ingest_port}/metrics/ingest",
                    body=mint_data,
                    headers={"Content-Type": CONTENT_TYPE_PLAIN},
                ).json()
                mint_response = MintResponse.from_json(response)
                responses.append(mint_response)
            else:
                if self.print_metrics:
                    # Fix: iterate only the current chunk; iterating mint_lines here
                    # logged every metric line once per chunk.
                    for line in chunk:
                        self.logger.info(f"send_metric: {line}")

                response = MintResponse(lines_invalid=0, lines_ok=len(chunk), error=None, warnings=None)
                responses.append(response)
        return responses

    def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> list[dict | None]:
        """Log events instead of sending them; always returns an empty list."""
        # Normalize a single event dict to a list, same as HttpClient.send_events,
        # so len() and iteration act on events rather than dict keys.
        event_list = [events] if isinstance(events, dict) else events
        self.logger.info(f"send_events (enrichment = {eec_enrichment}): {len(event_list)} events")
        if self.print_metrics:
            for event in event_list:
                # Fix: message previously read "sendf_event" (typo).
                self.logger.info(f"send_event: {event}")
        return []

    def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
        """Log self-monitoring metric lines; reports every line as OK."""
        for line in mint_lines:
            self.logger.info(f"send_sfm_metric: {line}")
        return MintResponse(lines_invalid=0, lines_ok=len(mint_lines), error=None, warnings=None)

    def get_cluster_time_diff(self) -> int:
        # No cluster to compare against when debugging.
        return 0
447
+
448
+
449
def divide_into_chunks(iterable: Iterable, chunk_size: int) -> Iterable:
    """
    Break an iterable into consecutive lists of at most chunk_size items.
    Example: divide_into_chunks([1, 2, 3, 4, 5, 6, 7, 8, 9], 3) -> [[1, 2, 3], [4, 5, 6], [7, 8, 9]]

    :param iterable: The iterable to chunk
    :param chunk_size: The size of the chunks
    """
    it = iter(iterable)
    # Two-argument iter() calls the lambda until it returns the sentinel [],
    # i.e. until the underlying iterator is exhausted.
    yield from iter(lambda: list(islice(it, chunk_size)), [])
463
+
464
def divide_logs_into_batches(logs: list[dict]):
    """
    Yield successive batches from a list of log events, according to sizing limitations
    imposed by the EEC: 5 MB payload, 50,000 events

    :param logs: The list of log events
    """
    events = deque(logs)

    batch = []
    batch_size = 0
    batch_items = 0

    while events:
        if batch_items == MAX_LOG_EVENTS_PER_REQUEST:
            # Event-count limit reached: flush the current batch and start a new one.
            yield batch
            batch = []
            batch_size = 0
            batch_items = 0
            continue

        event = events.popleft()

        if event is None:
            # Fix: a None entry flushes the current batch. The batch must also be
            # reset here; previously it was yielded without resetting, so already
            # emitted events were duplicated into later batches.
            yield batch
            batch = []
            batch_size = 0
            batch_items = 0
            continue

        # Fix: measure the serialized size of the event. len(event) on a dict
        # counts keys, so the 5 MB payload limit was effectively never enforced.
        # The caller sends json.dumps(batch).encode("utf-8"), so the serialized
        # length is the right proxy for payload size.
        event_size = len(json.dumps(event))

        if batch_size + event_size >= MAX_LOG_REQUEST_SIZE:
            yield batch
            batch = [event]
            batch_size = event_size
            batch_items = 1
        else:
            batch.append(event)
            batch_size += event_size
            batch_items += 1

    # Emit whatever remains once all events are consumed.
    yield batch
503
+
504
@dataclass
class MintResponse:
    """Parsed response of a MINT metric ingest request."""

    lines_ok: int
    lines_invalid: int
    error: dict | None
    warnings: dict | None

    @staticmethod
    def from_json(json_data: dict) -> "MintResponse":
        """Build a MintResponse from the raw JSON dict returned by the EEC.

        Missing count fields default to 0; missing error/warnings default to None.
        """
        lines_ok = json_data.get("linesOk", 0)
        lines_invalid = json_data.get("linesInvalid", 0)
        return MintResponse(lines_ok, lines_invalid, json_data.get("error"), json_data.get("warnings"))

    def __str__(self) -> str:
        return f"MintResponse(lines_ok={self.lines_ok}, lines_invalid={self.lines_invalid}, error={self.error}, warnings={self.warnings})"