dt-extensions-sdk 1.2.4__py3-none-any.whl → 1.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dt_extensions_sdk-1.2.4.dist-info → dt_extensions_sdk-1.2.5.dist-info}/METADATA +1 -1
- dt_extensions_sdk-1.2.5.dist-info/RECORD +34 -0
- {dt_extensions_sdk-1.2.4.dist-info → dt_extensions_sdk-1.2.5.dist-info}/licenses/LICENSE.txt +9 -9
- dynatrace_extension/__about__.py +5 -5
- dynatrace_extension/__init__.py +27 -27
- dynatrace_extension/cli/__init__.py +5 -5
- dynatrace_extension/cli/create/__init__.py +1 -1
- dynatrace_extension/cli/create/create.py +76 -76
- dynatrace_extension/cli/create/extension_template/.gitignore.template +160 -160
- dynatrace_extension/cli/create/extension_template/README.md.template +33 -33
- dynatrace_extension/cli/create/extension_template/activation.json.template +15 -15
- dynatrace_extension/cli/create/extension_template/extension/activationSchema.json.template +118 -118
- dynatrace_extension/cli/create/extension_template/extension/extension.yaml.template +17 -17
- dynatrace_extension/cli/create/extension_template/extension_name/__main__.py.template +43 -43
- dynatrace_extension/cli/create/extension_template/setup.py.template +28 -28
- dynatrace_extension/cli/main.py +437 -437
- dynatrace_extension/cli/schema.py +129 -129
- dynatrace_extension/sdk/__init__.py +3 -3
- dynatrace_extension/sdk/activation.py +43 -43
- dynatrace_extension/sdk/callback.py +145 -145
- dynatrace_extension/sdk/communication.py +483 -483
- dynatrace_extension/sdk/event.py +19 -19
- dynatrace_extension/sdk/extension.py +1070 -1070
- dynatrace_extension/sdk/helper.py +191 -191
- dynatrace_extension/sdk/metric.py +118 -118
- dynatrace_extension/sdk/runtime.py +67 -67
- dynatrace_extension/sdk/snapshot.py +198 -198
- dynatrace_extension/sdk/vendor/mureq/LICENSE +13 -13
- dynatrace_extension/sdk/vendor/mureq/mureq.py +448 -448
- dt_extensions_sdk-1.2.4.dist-info/RECORD +0 -34
- {dt_extensions_sdk-1.2.4.dist-info → dt_extensions_sdk-1.2.5.dist-info}/WHEEL +0 -0
- {dt_extensions_sdk-1.2.4.dist-info → dt_extensions_sdk-1.2.5.dist-info}/entry_points.txt +0 -0
@@ -1,483 +1,483 @@
|
|
1
|
-
# SPDX-FileCopyrightText: 2023-present Dynatrace LLC
|
2
|
-
#
|
3
|
-
# SPDX-License-Identifier: MIT
|
4
|
-
|
5
|
-
from __future__ import annotations
|
6
|
-
|
7
|
-
import json
|
8
|
-
import logging
|
9
|
-
import sys
|
10
|
-
from abc import ABC, abstractmethod
|
11
|
-
from dataclasses import dataclass
|
12
|
-
from enum import Enum
|
13
|
-
from pathlib import Path
|
14
|
-
from typing import Any, Generator, List, Sequence, TypeVar
|
15
|
-
|
16
|
-
from .vendor.mureq.mureq import HTTPException, Response, request
|
17
|
-
|
18
|
-
CONTENT_TYPE_JSON = "application/json;charset=utf-8"
|
19
|
-
CONTENT_TYPE_PLAIN = "text/plain;charset=utf-8"
|
20
|
-
COUNT_METRIC_ITEMS_DICT = TypeVar("COUNT_METRIC_ITEMS_DICT", str, List[str])
|
21
|
-
|
22
|
-
# TODO - I believe these can be adjusted via RuntimeConfig, they can't be constants
|
23
|
-
MAX_MINT_LINES_PER_REQUEST = 1000
|
24
|
-
MAX_LOG_EVENTS_PER_REQUEST = 50_000
|
25
|
-
MAX_LOG_REQUEST_SIZE = 5_000_000 # actually 5_242_880
|
26
|
-
MAX_METRIC_REQUEST_SIZE = 1_000_000 # actually 1_048_576
|
27
|
-
|
28
|
-
HTTP_BAD_REQUEST = 400
|
29
|
-
|
30
|
-
|
31
|
-
class StatusValue(Enum):
    """Closed set of health states an extension can report to the EEC.

    The string values are the wire values sent in the status payload
    (see Status.to_json); EMPTY is the default used by keep-alives.
    """

    EMPTY = ""
    OK = "OK"
    GENERIC_ERROR = "GENERIC_ERROR"
    INVALID_ARGS_ERROR = "INVALID_ARGS_ERROR"
    EEC_CONNECTION_ERROR = "EEC_CONNECTION_ERROR"
    INVALID_CONFIG_ERROR = "INVALID_CONFIG_ERROR"
    AUTHENTICATION_ERROR = "AUTHENTICATION_ERROR"
    DEVICE_CONNECTION_ERROR = "DEVICE_CONNECTION_ERROR"
    UNKNOWN_ERROR = "UNKNOWN_ERROR"
|
41
|
-
|
42
|
-
|
43
|
-
class Status:
    """A health report (state, optional message and timestamp) sent to the EEC."""

    def __init__(self, status: StatusValue = StatusValue.EMPTY, message: str = "", timestamp: int | None = None):
        self.status = status
        self.message = message
        self.timestamp = timestamp

    def to_json(self) -> dict:
        """Serialize to the dict shape the EEC expects.

        The timestamp key is included only when the value is truthy
        (i.e. set and non-zero).
        """
        payload: dict = {
            "status": self.status.value,
            "message": self.message,
        }
        if self.timestamp:
            payload["timestamp"] = self.timestamp  # type: ignore
        return payload

    def __repr__(self):
        return json.dumps(self.to_json())

    def is_error(self) -> bool:
        """Return True for any state other than OK or EMPTY."""
        ok_states = (StatusValue.OK, StatusValue.EMPTY)
        return self.status not in ok_states
|
60
|
-
|
61
|
-
|
62
|
-
class CommunicationClient(ABC):
    """
    Abstract class for extension communication
    """

    @abstractmethod
    def get_activation_config(self) -> dict:
        """Return the activation (monitoring) configuration as a dict."""
        pass

    @abstractmethod
    def get_extension_config(self) -> str:
        """Return the raw extension configuration (extension.yaml content) as text."""
        pass

    @abstractmethod
    def get_feature_sets(self) -> dict[str, list[str]]:
        """Return a mapping of feature set name -> list of metric keys."""
        pass

    @abstractmethod
    def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
        """Register count metrics with the monotonic cache converter."""
        pass

    @abstractmethod
    def send_count_delta_signal(self, metric_keys: set[str]) -> None:
        """Signal that delta calculation should run for the given metric keys."""
        pass

    @abstractmethod
    def send_status(self, status: Status) -> dict:
        """Send a status report; return the parsed response as a dict."""
        pass

    @abstractmethod
    def send_keep_alive(self) -> str:
        """Send a keep-alive (an empty status) and return the response."""
        pass

    @abstractmethod
    def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
        """Send MINT metric lines; return one MintResponse per batch sent."""
        pass

    @abstractmethod
    def send_events(self, event: dict | list[dict], eec_enrichment: bool) -> list[dict | None]:
        """Send log events; return one parsed response (or None) per batch."""
        pass

    @abstractmethod
    def send_sfm_metrics(self, metrics: list[str]) -> MintResponse:
        """Send self-monitoring (SFM) metric lines in a single request."""
        pass

    @abstractmethod
    def get_cluster_time_diff(self) -> int:
        """Return the cluster time difference in milliseconds."""
        pass

    @abstractmethod
    def send_dt_event(self, event: dict) -> None:
        """Send a single Dynatrace event payload."""
        pass
|
114
|
-
|
115
|
-
|
116
|
-
class HttpClient(CommunicationClient):
    """
    Concrete implementation of the client, this one handles the communication with the EEC
    """

    def __init__(self, base_url: str, datasource_id: str, id_token_file_path: str, logger: logging.Logger):
        # Pre-build every EEC endpoint URL for this datasource id.
        self._activation_config_url = f"{base_url}/userconfig/{datasource_id}"
        self._extension_config_url = f"{base_url}/extconfig/{datasource_id}"
        self._metric_url = f"{base_url}/mint/{datasource_id}"
        self._sfm_url = f"{base_url}/sfm/{datasource_id}"
        self._keep_alive_url = f"{base_url}/alive/{datasource_id}"
        self._timediff_url = f"{base_url}/timediffms"
        self._events_url = f"{base_url}/logs/{datasource_id}"
        self._count_metric_register_url = f"{base_url}/countmetricregister/{datasource_id}"
        self._count_delta_signal_url = f"{base_url}/countmetricdeltasignal/{datasource_id}"
        self._feature_sets_query = "?feature_sets_json"
        self._event_ingest_url = f"{base_url}/events/{datasource_id}"

        # The id token file content becomes the Api-Token for every request.
        # NOTE(review): the token is read once at construction; a rotated
        # token on disk is not picked up — confirm this matches EEC behavior.
        with open(id_token_file_path) as f:
            id_token = f.read()
        self._headers = {"Authorization": f"Api-Token {id_token}"}

        self.logger = logger

    def _make_request(
        self,
        url: str,
        method: str = "GET",
        body: Any = None,
        extra_headers: dict | None = None,
        is_delta_signal: bool = False,
    ) -> Response:
        """Issue one HTTP request with auth headers merged in.

        Always returns the Response (even for HTTP >= 400); error statuses
        are logged as warnings unless is_delta_signal is set, because a
        failed delta signal is an expected condition (not enough cached
        values) and must not spam the log.
        """
        if extra_headers is None:
            extra_headers = {}
        # extra_headers wins over the base auth headers on key collision.
        headers = {**self._headers, **extra_headers}

        response = request(method, url, body=body, headers=headers)
        self.logger.debug(f"Response from {url}: {response}")
        if response.status_code >= HTTP_BAD_REQUEST:
            if not is_delta_signal:
                self.logger.warning(f"Error HTTP {response.status_code} from {url}: {response.content}")
        return response

    def get_activation_config(self) -> dict:
        """Fetch the activation config; exit the process if the EEC refuses.

        Returns {} on connection or JSON-parse failures, but terminates the
        extension (sys.exit(1)) on an HTTP error status, since it cannot
        run without an activation configuration.
        """
        try:
            response = self._make_request(self._activation_config_url, "GET")
        except HTTPException as err:
            self.logger.error(f"HTTP exception: {err}")
            return {}

        if response.status_code < HTTP_BAD_REQUEST:
            try:
                return response.json()
            except Exception as err:
                self.logger.error(f"JSON parse failure: {err}")
                return {}
        else:
            self.logger.error(f"Can't get activation configuration ({response.content}). Extension is stopped.")
            sys.exit(1)

    def get_extension_config(self) -> str:
        """Fetch the raw extension configuration text; "" on HTTP failure."""
        try:
            response = self._make_request(self._extension_config_url, "GET")
            return response.content.decode("utf-8")
        except HTTPException as err:
            self.logger.error(f"HTTP exception: {err}")
            return ""

    def get_feature_sets(self) -> dict[str, list[str]]:
        """Fetch feature sets via the extconfig endpoint's JSON query; {} on any failure."""
        try:
            response = self._make_request(self._extension_config_url + self._feature_sets_query, "GET")
        except HTTPException as err:
            self.logger.error(f"HTTP exception: {err}")
            return {}

        if response.status_code < HTTP_BAD_REQUEST:
            try:
                return response.json()
            except Exception as err:
                self.logger.error(f"JSON parse failure: {err}")
                return {}

        return {}

    def register_count_metrics(self, json_pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
        """Register count metrics with the EEC monotonic cache converter."""
        register_data = json.dumps(json_pattern).encode("utf-8")
        try:
            response = self._make_request(
                self._count_metric_register_url,
                "POST",
                register_data,
                extra_headers={"Content-Type": CONTENT_TYPE_JSON},
            )
            if response.ok:
                self.logger.debug(
                    f"Monotonic cache converter successful registration for metric {list(json_pattern.keys())}."
                )
        except HTTPException:
            self.logger.error(
                f"Monotonic cache converter registration request error for metric {list(json_pattern.keys())}."
            )

    def send_count_delta_signal(self, metric_keys: set[str]) -> None:
        """Ask the EEC to compute deltas for the given count metrics.

        A non-OK response is expected when not enough samples are cached
        yet, so it is logged at debug level only (is_delta_signal=True
        also suppresses the generic HTTP-error warning).
        """
        json_data = {"metric_keys": list(metric_keys), "filter_dimensions": {}}
        delta_signal_data = json.dumps(json_data).encode("utf-8")
        try:
            response = self._make_request(
                self._count_delta_signal_url,
                "POST",
                delta_signal_data,
                extra_headers={"Content-Type": CONTENT_TYPE_JSON},
                is_delta_signal=True,
            )
            if response.ok:
                self.logger.debug(
                    f"Monotonic converter cache delta calculation signal success for metric {metric_keys}."
                )
            else:
                self.logger.debug(
                    f"Not enough metrics of type {metric_keys} cached in monotonic cache converter to calculate delta."
                )
        except HTTPException:
            self.logger.error(
                f"Monotonic cache converter delta calculation signal request error for metric {metric_keys}."
            )

    def send_dt_event(self, event: dict[str, str | int | dict[str, str]]):
        """POST a single Dynatrace event; failures are logged, never raised."""
        json_data = json.dumps(event).encode("utf-8")
        try:
            response = self._make_request(
                self._event_ingest_url, "POST", json_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
            )
            if response.ok:
                self.logger.debug(f"DT Event sent to EEC, content: {json_data.decode('utf-8')}")
            else:
                self.logger.debug(f"DT Event request failed: {response.content}")
        except HTTPException:
            self.logger.error(f"DT Event request HTTP exception, request body: {json_data.decode('utf-8')}")

    def send_status(self, status: Status) -> dict:
        """POST a status report to the keep-alive endpoint; return parsed response.

        NOTE(review): HTTPException and JSON decode errors are NOT caught
        here, unlike the other senders — confirm callers handle that.
        """
        encoded_data = json.dumps(status.to_json()).encode("utf-8")
        self.logger.debug(f"Sending status to EEC: {status}")
        response = self._make_request(
            self._keep_alive_url, "POST", encoded_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
        ).content
        return json.loads(response.decode("utf-8"))

    def send_keep_alive(self):
        """A keep-alive is just an empty status report."""
        return self.send_status(Status())

    def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
        """POST MINT lines in size-limited batches; one MintResponse per batch."""
        responses = []

        # We divide into batches of MAX_METRIC_REQUEST_SIZE bytes to avoid hitting the body size limit
        batches = divide_into_batches(mint_lines, MAX_METRIC_REQUEST_SIZE, "\n")
        for batch in batches:
            response = self._make_request(
                self._metric_url, "POST", batch, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
            ).json()
            self.logger.debug(f"{self._metric_url}: {response}")
            mint_response = MintResponse.from_json(response)
            responses.append(mint_response)
        return responses

    def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> list[dict | None]:
        """POST log events in size-limited JSON batches.

        A batch whose response fails to parse as JSON contributes None to
        the returned list instead of raising.
        """
        self.logger.debug(f"Sending log events: {events}")

        responses = []
        # A single event dict is normalized to a one-element list.
        if isinstance(events, dict):
            events = [events]
        batches = divide_into_batches(events, MAX_LOG_REQUEST_SIZE)

        for batch in batches:
            try:
                eec_response = self._make_request(
                    self._events_url,
                    "POST",
                    batch,
                    extra_headers={"Content-Type": CONTENT_TYPE_JSON, "eec-enrichment": str(eec_enrichment).lower()},
                ).json()
                responses.append(eec_response)
            except json.JSONDecodeError:
                responses.append(None)

        return responses

    def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
        """POST self-monitoring metrics as one newline-joined request (no batching)."""
        mint_data = "\n".join(mint_lines).encode("utf-8")
        return MintResponse.from_json(
            self._make_request(
                self._sfm_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
            ).json()
        )

    def get_cluster_time_diff(self) -> int:
        """Return the cluster time difference in ms as reported by the EEC."""
        response = self._make_request(self._timediff_url, "GET")
        time_diff = response.json()["clusterDiffMs"]
        return time_diff
|
314
|
-
|
315
|
-
|
316
|
-
class DebugClient(CommunicationClient):
    """
    This client is used for debugging purposes
    It does not send metrics to Dynatrace, but prints them to the console
    """

    def __init__(
        self,
        activation_config_path: str,
        extension_config_path: str,
        logger: logging.Logger,
        local_ingest: bool = False,
        local_ingest_port: int = 14499,
        print_metrics: bool = True,
    ):
        # Activation config is optional: missing path or file -> empty dict.
        self.activation_config = {}
        if activation_config_path and Path(activation_config_path).exists():
            with open(activation_config_path) as f:
                self.activation_config = json.load(f)

        # Extension config defaults to the conventional repo location.
        self.extension_config = ""
        if not extension_config_path:
            extension_config_path = "extension/extension.yaml"
        if Path(extension_config_path).exists():
            with open(extension_config_path) as f:
                self.extension_config = f.read()
        self.logger = logger
        # When local_ingest is set, metrics go to the local OneAgent
        # metric ingest port instead of being printed.
        self.local_ingest = local_ingest
        self.local_ingest_port = local_ingest_port
        self.print_metrics = print_metrics

    def get_activation_config(self) -> dict:
        """Return the activation config loaded from disk at construction."""
        return self.activation_config

    def get_extension_config(self) -> str:
        """Return the extension.yaml text loaded from disk at construction."""
        return self.extension_config

    def get_feature_sets(self) -> dict[str, list[str]]:
        """Parse feature sets out of the local extension.yaml."""
        # This is only called from dt-sdk run, where PyYaml is installed because of dt-cli
        # Do NOT move this to the top of the file
        import yaml  # type: ignore

        # Grab the feature sets from the extension.yaml file
        extension_yaml = yaml.safe_load(self.extension_config)
        if not extension_yaml:
            return {}

        yaml_feature_sets = extension_yaml.get("python", {}).get("featureSets", [])
        if not yaml_feature_sets:
            return {}

        # Construct the object that the SDK expects: only feature sets that
        # are also enabled in the activation config are included.
        feature_sets = {}
        for feature_set in yaml_feature_sets:
            feature_set_name = feature_set["featureSet"]
            if feature_set_name in self.activation_config.get("featureSets", []):
                feature_sets[feature_set_name] = [metric["key"] for metric in feature_set["metrics"]]

        return feature_sets

    def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
        """Debug stub: just logs the registration."""
        self.logger.info(f"Registering metrics in converter: {pattern}")

    def send_count_delta_signal(self, metric_keys: set[str]) -> None:
        """Debug stub: just logs the delta signal."""
        self.logger.info(f"Sending delta signal for: {metric_keys}")

    def send_dt_event(self, event: dict) -> None:
        """Debug stub: just logs the event."""
        self.logger.info(f"Sending DT Event: {event}")

    def send_status(self, status: Status) -> dict:
        """Debug stub: logs the status and returns an empty response."""
        self.logger.info(f"send_status: '{status}'")
        return {}

    def send_keep_alive(self):
        return self.send_status(Status())

    def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
        """Send metrics to the local ingest port, or print them.

        NOTE(review): unlike HttpClient.send_metrics, no join_with="\\n" is
        passed to divide_into_batches, so each batch is JSON-encoded rather
        than newline-joined — confirm the local ingest endpoint accepts this.
        NOTE(review): in the print branch the full mint_lines list is logged
        once per batch, duplicating output when there are multiple batches.
        """
        total_lines = len(mint_lines)
        self.logger.info(f"Start sending {total_lines} metrics to the EEC")

        responses = []

        batches = divide_into_batches(mint_lines, MAX_METRIC_REQUEST_SIZE)
        for batch in batches:
            if self.local_ingest:
                response = request(
                    "POST",
                    f"http://localhost:{self.local_ingest_port}/metrics/ingest",
                    body=batch,
                    headers={"Content-Type": CONTENT_TYPE_PLAIN},
                ).json()
                mint_response = MintResponse.from_json(response)
                responses.append(mint_response)
            elif self.print_metrics:
                for line in mint_lines:
                    self.logger.info(f"send_metric: {line}")

        return responses

    def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> list[dict | None]:
        """Debug stub: logs the events and returns an empty response list."""
        self.logger.info(f"send_events (enrichment = {eec_enrichment}): {len(events)} events")
        if isinstance(events, dict):
            events = [events]
        if self.print_metrics:
            for event in events:
                self.logger.info(f"send_event: {event}")
        return []

    def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
        """Debug stub: logs the lines and fabricates an all-OK MintResponse."""
        for line in mint_lines:
            self.logger.info(f"send_sfm_metric: {line}")
        return MintResponse(lines_invalid=0, lines_ok=len(mint_lines), error=None, warnings=None)

    def get_cluster_time_diff(self) -> int:
        """Debug stub: no cluster, so the time difference is zero."""
        return 0
|
431
|
-
|
432
|
-
|
433
|
-
def divide_into_batches(
    items: Sequence[dict | str], max_size_bytes: int, join_with: str | None = None
) -> Generator[bytes, None, None]:
    """
    Yield successive batches from a list, according to sizing limitations

    :param items: The list items to divide, they must be encodable to bytes
    :param max_size_bytes: The maximum size of the payload in bytes
    :param join_with: A string to join the items with before encoding
    :return: A generator of batches of log events already encoded
    """
    if not items:
        return

    if join_with is not None:
        joined = join_with.join(items)  # type: ignore
        encoded = joined.encode(errors="replace")
    else:
        encoded = json.dumps(items).encode(errors="replace")
    size = len(encoded)
    if size <= max_size_bytes:
        yield encoded
        return

    # A single item that is still too large cannot be split further; yield it
    # as-is (best effort) instead of recursing forever. The previous
    # implementation hit infinite recursion (RecursionError) here because
    # splitting a one-element sequence reproduced the same sequence.
    if len(items) == 1:
        yield encoded
        return

    # if we get here, the payload is too large, split it in half until we have chunks that are small enough
    half = len(items) // 2
    first_half = items[:half]
    second_half = items[half:]
    yield from divide_into_batches(first_half, max_size_bytes, join_with)
    yield from divide_into_batches(second_half, max_size_bytes, join_with)
|
464
|
-
|
465
|
-
|
466
|
-
@dataclass
class MintResponse:
    """Parsed result of a MINT metric ingest request."""

    lines_ok: int
    lines_invalid: int
    error: dict | None
    warnings: dict | None

    @staticmethod
    def from_json(json_data: dict) -> MintResponse:
        """Build a MintResponse from the raw EEC JSON payload.

        Missing line counters default to 0; missing error/warnings to None.
        """
        ok_count = json_data.get("linesOk", 0)
        invalid_count = json_data.get("linesInvalid", 0)
        return MintResponse(
            lines_ok=ok_count,
            lines_invalid=invalid_count,
            error=json_data.get("error"),
            warnings=json_data.get("warnings"),
        )

    def __str__(self) -> str:
        return (
            f"MintResponse(lines_ok={self.lines_ok}, "
            f"lines_invalid={self.lines_invalid}, "
            f"error={self.error}, warnings={self.warnings})"
        )
|
1
|
+
# SPDX-FileCopyrightText: 2023-present Dynatrace LLC
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: MIT
|
4
|
+
|
5
|
+
from __future__ import annotations
|
6
|
+
|
7
|
+
import json
|
8
|
+
import logging
|
9
|
+
import sys
|
10
|
+
from abc import ABC, abstractmethod
|
11
|
+
from dataclasses import dataclass
|
12
|
+
from enum import Enum
|
13
|
+
from pathlib import Path
|
14
|
+
from typing import Any, Generator, List, Sequence, TypeVar
|
15
|
+
|
16
|
+
from .vendor.mureq.mureq import HTTPException, Response, request
|
17
|
+
|
18
|
+
CONTENT_TYPE_JSON = "application/json;charset=utf-8"
|
19
|
+
CONTENT_TYPE_PLAIN = "text/plain;charset=utf-8"
|
20
|
+
COUNT_METRIC_ITEMS_DICT = TypeVar("COUNT_METRIC_ITEMS_DICT", str, List[str])
|
21
|
+
|
22
|
+
# TODO - I believe these can be adjusted via RuntimeConfig, they can't be constants
|
23
|
+
MAX_MINT_LINES_PER_REQUEST = 1000
|
24
|
+
MAX_LOG_EVENTS_PER_REQUEST = 50_000
|
25
|
+
MAX_LOG_REQUEST_SIZE = 5_000_000 # actually 5_242_880
|
26
|
+
MAX_METRIC_REQUEST_SIZE = 1_000_000 # actually 1_048_576
|
27
|
+
|
28
|
+
HTTP_BAD_REQUEST = 400
|
29
|
+
|
30
|
+
|
31
|
+
class StatusValue(Enum):
    """Closed set of health states an extension can report to the EEC.

    The string values are the wire values sent in the status payload
    (see Status.to_json); EMPTY is the default used by keep-alives.
    """

    EMPTY = ""
    OK = "OK"
    GENERIC_ERROR = "GENERIC_ERROR"
    INVALID_ARGS_ERROR = "INVALID_ARGS_ERROR"
    EEC_CONNECTION_ERROR = "EEC_CONNECTION_ERROR"
    INVALID_CONFIG_ERROR = "INVALID_CONFIG_ERROR"
    AUTHENTICATION_ERROR = "AUTHENTICATION_ERROR"
    DEVICE_CONNECTION_ERROR = "DEVICE_CONNECTION_ERROR"
    UNKNOWN_ERROR = "UNKNOWN_ERROR"
|
41
|
+
|
42
|
+
|
43
|
+
class Status:
    """A health report (state, optional message and timestamp) sent to the EEC."""

    def __init__(self, status: StatusValue = StatusValue.EMPTY, message: str = "", timestamp: int | None = None):
        self.status = status
        self.message = message
        self.timestamp = timestamp

    def to_json(self) -> dict:
        """Serialize to the dict shape the EEC expects.

        The timestamp key is included only when truthy — note a timestamp
        of 0 is therefore omitted, same as None.
        """
        status = {"status": self.status.value, "message": self.message}
        if self.timestamp:
            status["timestamp"] = self.timestamp  # type: ignore
        return status

    def __repr__(self):
        return json.dumps(self.to_json())

    def is_error(self) -> bool:
        """Return True for any state other than OK or EMPTY."""
        return self.status not in (StatusValue.OK, StatusValue.EMPTY)
|
60
|
+
|
61
|
+
|
62
|
+
class CommunicationClient(ABC):
    """
    Abstract class for extension communication
    """

    @abstractmethod
    def get_activation_config(self) -> dict:
        """Return the activation (monitoring) configuration as a dict."""
        pass

    @abstractmethod
    def get_extension_config(self) -> str:
        """Return the raw extension configuration (extension.yaml content) as text."""
        pass

    @abstractmethod
    def get_feature_sets(self) -> dict[str, list[str]]:
        """Return a mapping of feature set name -> list of metric keys."""
        pass

    @abstractmethod
    def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
        """Register count metrics with the monotonic cache converter."""
        pass

    @abstractmethod
    def send_count_delta_signal(self, metric_keys: set[str]) -> None:
        """Signal that delta calculation should run for the given metric keys."""
        pass

    @abstractmethod
    def send_status(self, status: Status) -> dict:
        """Send a status report; return the parsed response as a dict."""
        pass

    @abstractmethod
    def send_keep_alive(self) -> str:
        """Send a keep-alive (an empty status) and return the response."""
        pass

    @abstractmethod
    def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
        """Send MINT metric lines; return one MintResponse per batch sent."""
        pass

    @abstractmethod
    def send_events(self, event: dict | list[dict], eec_enrichment: bool) -> list[dict | None]:
        """Send log events; return one parsed response (or None) per batch."""
        pass

    @abstractmethod
    def send_sfm_metrics(self, metrics: list[str]) -> MintResponse:
        """Send self-monitoring (SFM) metric lines in a single request."""
        pass

    @abstractmethod
    def get_cluster_time_diff(self) -> int:
        """Return the cluster time difference in milliseconds."""
        pass

    @abstractmethod
    def send_dt_event(self, event: dict) -> None:
        """Send a single Dynatrace event payload."""
        pass
|
114
|
+
|
115
|
+
|
116
|
+
class HttpClient(CommunicationClient):
|
117
|
+
"""
|
118
|
+
Concrete implementation of the client, this one handles the communication with the EEC
|
119
|
+
"""
|
120
|
+
|
121
|
+
def __init__(self, base_url: str, datasource_id: str, id_token_file_path: str, logger: logging.Logger):
|
122
|
+
self._activation_config_url = f"{base_url}/userconfig/{datasource_id}"
|
123
|
+
self._extension_config_url = f"{base_url}/extconfig/{datasource_id}"
|
124
|
+
self._metric_url = f"{base_url}/mint/{datasource_id}"
|
125
|
+
self._sfm_url = f"{base_url}/sfm/{datasource_id}"
|
126
|
+
self._keep_alive_url = f"{base_url}/alive/{datasource_id}"
|
127
|
+
self._timediff_url = f"{base_url}/timediffms"
|
128
|
+
self._events_url = f"{base_url}/logs/{datasource_id}"
|
129
|
+
self._count_metric_register_url = f"{base_url}/countmetricregister/{datasource_id}"
|
130
|
+
self._count_delta_signal_url = f"{base_url}/countmetricdeltasignal/{datasource_id}"
|
131
|
+
self._feature_sets_query = "?feature_sets_json"
|
132
|
+
self._event_ingest_url = f"{base_url}/events/{datasource_id}"
|
133
|
+
|
134
|
+
with open(id_token_file_path) as f:
|
135
|
+
id_token = f.read()
|
136
|
+
self._headers = {"Authorization": f"Api-Token {id_token}"}
|
137
|
+
|
138
|
+
self.logger = logger
|
139
|
+
|
140
|
+
def _make_request(
|
141
|
+
self,
|
142
|
+
url: str,
|
143
|
+
method: str = "GET",
|
144
|
+
body: Any = None,
|
145
|
+
extra_headers: dict | None = None,
|
146
|
+
is_delta_signal: bool = False,
|
147
|
+
) -> Response:
|
148
|
+
if extra_headers is None:
|
149
|
+
extra_headers = {}
|
150
|
+
headers = {**self._headers, **extra_headers}
|
151
|
+
|
152
|
+
response = request(method, url, body=body, headers=headers)
|
153
|
+
self.logger.debug(f"Response from {url}: {response}")
|
154
|
+
if response.status_code >= HTTP_BAD_REQUEST:
|
155
|
+
if not is_delta_signal:
|
156
|
+
self.logger.warning(f"Error HTTP {response.status_code} from {url}: {response.content}")
|
157
|
+
return response
|
158
|
+
|
159
|
+
def get_activation_config(self) -> dict:
|
160
|
+
try:
|
161
|
+
response = self._make_request(self._activation_config_url, "GET")
|
162
|
+
except HTTPException as err:
|
163
|
+
self.logger.error(f"HTTP exception: {err}")
|
164
|
+
return {}
|
165
|
+
|
166
|
+
if response.status_code < HTTP_BAD_REQUEST:
|
167
|
+
try:
|
168
|
+
return response.json()
|
169
|
+
except Exception as err:
|
170
|
+
self.logger.error(f"JSON parse failure: {err}")
|
171
|
+
return {}
|
172
|
+
else:
|
173
|
+
self.logger.error(f"Can't get activation configuration ({response.content}). Extension is stopped.")
|
174
|
+
sys.exit(1)
|
175
|
+
|
176
|
+
def get_extension_config(self) -> str:
|
177
|
+
try:
|
178
|
+
response = self._make_request(self._extension_config_url, "GET")
|
179
|
+
return response.content.decode("utf-8")
|
180
|
+
except HTTPException as err:
|
181
|
+
self.logger.error(f"HTTP exception: {err}")
|
182
|
+
return ""
|
183
|
+
|
184
|
+
def get_feature_sets(self) -> dict[str, list[str]]:
|
185
|
+
try:
|
186
|
+
response = self._make_request(self._extension_config_url + self._feature_sets_query, "GET")
|
187
|
+
except HTTPException as err:
|
188
|
+
self.logger.error(f"HTTP exception: {err}")
|
189
|
+
return {}
|
190
|
+
|
191
|
+
if response.status_code < HTTP_BAD_REQUEST:
|
192
|
+
try:
|
193
|
+
return response.json()
|
194
|
+
except Exception as err:
|
195
|
+
self.logger.error(f"JSON parse failure: {err}")
|
196
|
+
return {}
|
197
|
+
|
198
|
+
return {}
|
199
|
+
|
200
|
+
def register_count_metrics(self, json_pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
|
201
|
+
register_data = json.dumps(json_pattern).encode("utf-8")
|
202
|
+
try:
|
203
|
+
response = self._make_request(
|
204
|
+
self._count_metric_register_url,
|
205
|
+
"POST",
|
206
|
+
register_data,
|
207
|
+
extra_headers={"Content-Type": CONTENT_TYPE_JSON},
|
208
|
+
)
|
209
|
+
if response.ok:
|
210
|
+
self.logger.debug(
|
211
|
+
f"Monotonic cache converter successful registration for metric {list(json_pattern.keys())}."
|
212
|
+
)
|
213
|
+
except HTTPException:
|
214
|
+
self.logger.error(
|
215
|
+
f"Monotonic cache converter registration request error for metric {list(json_pattern.keys())}."
|
216
|
+
)
|
217
|
+
|
218
|
+
def send_count_delta_signal(self, metric_keys: set[str]) -> None:
|
219
|
+
json_data = {"metric_keys": list(metric_keys), "filter_dimensions": {}}
|
220
|
+
delta_signal_data = json.dumps(json_data).encode("utf-8")
|
221
|
+
try:
|
222
|
+
response = self._make_request(
|
223
|
+
self._count_delta_signal_url,
|
224
|
+
"POST",
|
225
|
+
delta_signal_data,
|
226
|
+
extra_headers={"Content-Type": CONTENT_TYPE_JSON},
|
227
|
+
is_delta_signal=True,
|
228
|
+
)
|
229
|
+
if response.ok:
|
230
|
+
self.logger.debug(
|
231
|
+
f"Monotonic converter cache delta calculation signal success for metric {metric_keys}."
|
232
|
+
)
|
233
|
+
else:
|
234
|
+
self.logger.debug(
|
235
|
+
f"Not enough metrics of type {metric_keys} cached in monotonic cache converter to calculate delta."
|
236
|
+
)
|
237
|
+
except HTTPException:
|
238
|
+
self.logger.error(
|
239
|
+
f"Monotonic cache converter delta calculation signal request error for metric {metric_keys}."
|
240
|
+
)
|
241
|
+
|
242
|
+
def send_dt_event(self, event: dict[str, str | int | dict[str, str]]):
|
243
|
+
json_data = json.dumps(event).encode("utf-8")
|
244
|
+
try:
|
245
|
+
response = self._make_request(
|
246
|
+
self._event_ingest_url, "POST", json_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
|
247
|
+
)
|
248
|
+
if response.ok:
|
249
|
+
self.logger.debug(f"DT Event sent to EEC, content: {json_data.decode('utf-8')}")
|
250
|
+
else:
|
251
|
+
self.logger.debug(f"DT Event request failed: {response.content}")
|
252
|
+
except HTTPException:
|
253
|
+
self.logger.error(f"DT Event request HTTP exception, request body: {json_data.decode('utf-8')}")
|
254
|
+
|
255
|
+
def send_status(self, status: Status) -> dict:
|
256
|
+
encoded_data = json.dumps(status.to_json()).encode("utf-8")
|
257
|
+
self.logger.debug(f"Sending status to EEC: {status}")
|
258
|
+
response = self._make_request(
|
259
|
+
self._keep_alive_url, "POST", encoded_data, extra_headers={"Content-Type": CONTENT_TYPE_JSON}
|
260
|
+
).content
|
261
|
+
return json.loads(response.decode("utf-8"))
|
262
|
+
|
263
|
+
def send_keep_alive(self):
|
264
|
+
return self.send_status(Status())
|
265
|
+
|
266
|
+
def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
|
267
|
+
responses = []
|
268
|
+
|
269
|
+
# We divide into batches of MAX_METRIC_REQUEST_SIZE bytes to avoid hitting the body size limit
|
270
|
+
batches = divide_into_batches(mint_lines, MAX_METRIC_REQUEST_SIZE, "\n")
|
271
|
+
for batch in batches:
|
272
|
+
response = self._make_request(
|
273
|
+
self._metric_url, "POST", batch, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
|
274
|
+
).json()
|
275
|
+
self.logger.debug(f"{self._metric_url}: {response}")
|
276
|
+
mint_response = MintResponse.from_json(response)
|
277
|
+
responses.append(mint_response)
|
278
|
+
return responses
|
279
|
+
|
280
|
+
def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> list[dict | None]:
|
281
|
+
self.logger.debug(f"Sending log events: {events}")
|
282
|
+
|
283
|
+
responses = []
|
284
|
+
if isinstance(events, dict):
|
285
|
+
events = [events]
|
286
|
+
batches = divide_into_batches(events, MAX_LOG_REQUEST_SIZE)
|
287
|
+
|
288
|
+
for batch in batches:
|
289
|
+
try:
|
290
|
+
eec_response = self._make_request(
|
291
|
+
self._events_url,
|
292
|
+
"POST",
|
293
|
+
batch,
|
294
|
+
extra_headers={"Content-Type": CONTENT_TYPE_JSON, "eec-enrichment": str(eec_enrichment).lower()},
|
295
|
+
).json()
|
296
|
+
responses.append(eec_response)
|
297
|
+
except json.JSONDecodeError:
|
298
|
+
responses.append(None)
|
299
|
+
|
300
|
+
return responses
|
301
|
+
|
302
|
+
def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
|
303
|
+
mint_data = "\n".join(mint_lines).encode("utf-8")
|
304
|
+
return MintResponse.from_json(
|
305
|
+
self._make_request(
|
306
|
+
self._sfm_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
|
307
|
+
).json()
|
308
|
+
)
|
309
|
+
|
310
|
+
def get_cluster_time_diff(self) -> int:
|
311
|
+
response = self._make_request(self._timediff_url, "GET")
|
312
|
+
time_diff = response.json()["clusterDiffMs"]
|
313
|
+
return time_diff
|
314
|
+
|
315
|
+
|
316
|
+
class DebugClient(CommunicationClient):
    """
    This client is used for debugging purposes
    It does not send metrics to Dynatrace, but prints them to the console
    """

    def __init__(
        self,
        activation_config_path: str,
        extension_config_path: str,
        logger: logging.Logger,
        local_ingest: bool = False,
        local_ingest_port: int = 14499,
        print_metrics: bool = True,
    ):
        # Activation config is optional when debugging; fall back to an empty dict.
        self.activation_config = {}
        if activation_config_path and Path(activation_config_path).exists():
            with open(activation_config_path) as f:
                self.activation_config = json.load(f)

        # The extension yaml is kept as raw text; parsing happens lazily in get_feature_sets.
        self.extension_config = ""
        if not extension_config_path:
            extension_config_path = "extension/extension.yaml"
        if Path(extension_config_path).exists():
            with open(extension_config_path) as f:
                self.extension_config = f.read()
        self.logger = logger
        self.local_ingest = local_ingest
        self.local_ingest_port = local_ingest_port
        self.print_metrics = print_metrics

    def get_activation_config(self) -> dict:
        return self.activation_config

    def get_extension_config(self) -> str:
        return self.extension_config

    def get_feature_sets(self) -> dict[str, list[str]]:
        """Parse the extension yaml and map enabled feature-set names to their metric keys."""
        # This is only called from dt-sdk run, where PyYaml is installed because of dt-cli
        # Do NOT move this to the top of the file
        import yaml  # type: ignore

        # Grab the feature sets from the extension.yaml file
        extension_yaml = yaml.safe_load(self.extension_config)
        if not extension_yaml:
            return {}

        yaml_feature_sets = extension_yaml.get("python", {}).get("featureSets", [])
        if not yaml_feature_sets:
            return {}

        # Construct the object that the SDK expects, keeping only feature sets
        # that are enabled in the activation config.
        feature_sets = {}
        for feature_set in yaml_feature_sets:
            feature_set_name = feature_set["featureSet"]
            if feature_set_name in self.activation_config.get("featureSets", []):
                feature_sets[feature_set_name] = [metric["key"] for metric in feature_set["metrics"]]

        return feature_sets

    def register_count_metrics(self, pattern: dict[str, dict[str, COUNT_METRIC_ITEMS_DICT]]) -> None:
        self.logger.info(f"Registering metrics in converter: {pattern}")

    def send_count_delta_signal(self, metric_keys: set[str]) -> None:
        self.logger.info(f"Sending delta signal for: {metric_keys}")

    def send_dt_event(self, event: dict) -> None:
        self.logger.info(f"Sending DT Event: {event}")

    def send_status(self, status: Status) -> dict:
        self.logger.info(f"send_status: '{status}'")
        return {}

    def send_keep_alive(self):
        return self.send_status(Status())

    def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
        """Send metrics to a local OneAgent ingest endpoint, or just log them.

        :param mint_lines: metric lines in MINT protocol format
        :return: one MintResponse per batch when local ingest is used, otherwise empty
        """
        total_lines = len(mint_lines)
        self.logger.info(f"Start sending {total_lines} metrics to the EEC")

        responses = []

        if self.local_ingest:
            # Join with newlines like the EEC client does: the MINT ingest
            # endpoint expects newline-separated plaintext lines, not a JSON
            # array (which is what divide_into_batches produces without a
            # join_with argument).
            batches = divide_into_batches(mint_lines, MAX_METRIC_REQUEST_SIZE, "\n")
            for batch in batches:
                response = request(
                    "POST",
                    f"http://localhost:{self.local_ingest_port}/metrics/ingest",
                    body=batch,
                    headers={"Content-Type": CONTENT_TYPE_PLAIN},
                ).json()
                mint_response = MintResponse.from_json(response)
                responses.append(mint_response)
        elif self.print_metrics:
            # Log each line exactly once. Previously this ran inside the batch
            # loop, so the full set of lines was duplicated once per batch.
            for line in mint_lines:
                self.logger.info(f"send_metric: {line}")

        return responses

    def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> list[dict | None]:
        # Normalize before logging so a single dict is counted as one event,
        # not as its number of keys.
        if isinstance(events, dict):
            events = [events]
        self.logger.info(f"send_events (enrichment = {eec_enrichment}): {len(events)} events")
        if self.print_metrics:
            for event in events:
                self.logger.info(f"send_event: {event}")
        return []

    def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
        for line in mint_lines:
            self.logger.info(f"send_sfm_metric: {line}")
        return MintResponse(lines_invalid=0, lines_ok=len(mint_lines), error=None, warnings=None)

    def get_cluster_time_diff(self) -> int:
        # Debugging runs locally, so assume no clock difference to a cluster.
        return 0
|
431
|
+
|
432
|
+
|
433
|
+
def divide_into_batches(
    items: Sequence[dict | str], max_size_bytes: int, join_with: str | None = None
) -> Generator[bytes, None, None]:
    """
    Yield successive batches from a list, according to sizing limitations

    :param items: The list items to divide, they must be encodable to bytes
    :param max_size_bytes: The maximum size of the payload in bytes
    :param join_with: A string to join the items with before encoding
    :return: A generator of batches of log events already encoded
    """

    if not items:
        return

    if join_with is not None:
        encoded = join_with.join(items).encode(errors="replace")  # type: ignore
    else:
        encoded = json.dumps(items).encode(errors="replace")
    size = len(encoded)
    if size <= max_size_bytes:
        yield encoded
        return

    # A single item that exceeds the limit cannot be split further; yield it
    # as-is so the server can reject it. Previously this case recursed with an
    # unchanged argument ([:0] and [0:]) and crashed with a RecursionError.
    if len(items) == 1:
        yield encoded
        return

    # The payload is too large: split it in half until the chunks are small enough.
    half = len(items) // 2
    yield from divide_into_batches(items[:half], max_size_bytes, join_with)
    yield from divide_into_batches(items[half:], max_size_bytes, join_with)
|
464
|
+
|
465
|
+
|
466
|
+
@dataclass
class MintResponse:
    # Parsed response of a MINT metric ingestion request:
    # counts of accepted/rejected lines plus optional error and warning details.
    lines_ok: int
    lines_invalid: int
    error: dict | None
    warnings: dict | None

    @staticmethod
    def from_json(json_data: dict) -> "MintResponse":
        """Build a MintResponse from the raw JSON dict returned by the ingest endpoint."""
        fields = {
            "lines_ok": json_data.get("linesOk", 0),
            "lines_invalid": json_data.get("linesInvalid", 0),
            "error": json_data.get("error"),
            "warnings": json_data.get("warnings"),
        }
        return MintResponse(**fields)

    def __str__(self) -> str:
        return f"MintResponse(lines_ok={self.lines_ok}, lines_invalid={self.lines_invalid}, error={self.error}, warnings={self.warnings})"
|