dt-extensions-sdk 1.1.19__py3-none-any.whl → 1.1.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: dt-extensions-sdk
- Version: 1.1.19
+ Version: 1.1.20
  Project-URL: Documentation, https://github.com/dynatrace-extensions/dt-extensions-python-sdk#readme
  Project-URL: Issues, https://github.com/dynatrace-extensions/dt-extensions-python-sdk/issues
  Project-URL: Source, https://github.com/dynatrace-extensions/dt-extensions-python-sdk
@@ -1,4 +1,4 @@
- dynatrace_extension/__about__.py,sha256=eK_1QX-n9dlWHMtuWO8671EwIG7rKIdY_7SC5tYSNe0,110
+ dynatrace_extension/__about__.py,sha256=-8wSDjpuHMK14QbY11SSVFtSWsTg2XYDKWCAQc82ob4,110
  dynatrace_extension/__init__.py,sha256=BvQuknmA7ti3WJi3zEXZfY7aAxJrie37VNitWICsUvI,752
  dynatrace_extension/cli/__init__.py,sha256=HCboY_eJPoqjFmoPDsBL8Jk6aNvank8K7JpkVrgwzUM,123
  dynatrace_extension/cli/main.py,sha256=wi3xxji3WbVXViebzLCB5UGNSADUcdYUzGI02EYbsaM,16965
@@ -16,7 +16,7 @@ dynatrace_extension/cli/create/extension_template/extension_name/__main__.py.tem
  dynatrace_extension/sdk/__init__.py,sha256=RsqQ1heGyCmSK3fhuEKAcxQIRCg4gEK0-eSkIehL5Nc,86
  dynatrace_extension/sdk/activation.py,sha256=goTbT1tD2kn8xfyXFdTy_cTZNcFPJpgbvQM8HOzKECA,1480
  dynatrace_extension/sdk/callback.py,sha256=1P9uQ388j6q6a-8VgmV5FxcOMH-Mr3zvWXk2ftVD8j4,5828
- dynatrace_extension/sdk/communication.py,sha256=Z8Gbvd18ISf1pqXCDsBEfFeNU996J16aPgKUhJ082Zc,18804
+ dynatrace_extension/sdk/communication.py,sha256=dfnafZvpBCSQDGHGuNl70Ym1W1RlIuJMynDxMOfnFCs,17592
  dynatrace_extension/sdk/event.py,sha256=J261imbFKpxfuAQ6Nfu3RRcsIQKKivy6fme1nww2g-8,388
  dynatrace_extension/sdk/extension.py,sha256=3883WIlA5hM-0kNbdFaOtxYC_KzW-jPLMMaf0EgHRBo,40642
  dynatrace_extension/sdk/helper.py,sha256=ZNrO9ao2hE3KQ934vAYD74k0fCr6QTG-_bAvbk9-hi8,6562
@@ -26,8 +26,8 @@ dynatrace_extension/sdk/vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5
  dynatrace_extension/sdk/vendor/mureq/LICENSE,sha256=8AVcgZgiT_mvK1fOofXtRRr2f1dRXS_K21NuxQgP4VM,671
  dynatrace_extension/sdk/vendor/mureq/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dynatrace_extension/sdk/vendor/mureq/mureq.py,sha256=TQ3xcpfwLEYIU1TU65A9OqWqwIKqyO8SSUFeuCvE60Y,14655
- dt_extensions_sdk-1.1.19.dist-info/METADATA,sha256=BjBxbVeBUwaGKVSQ_C_G1sQJLOOi7CuJj5eBEUoY3JM,2794
- dt_extensions_sdk-1.1.19.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
- dt_extensions_sdk-1.1.19.dist-info/entry_points.txt,sha256=pweyOCgENGHjOlT6_kXYaBPOrE3p18K0UettqnNlnoE,55
- dt_extensions_sdk-1.1.19.dist-info/licenses/LICENSE.txt,sha256=3Zihv0lOVYHNfDkJC-tUAU6euP9r2NexsDW4w-zqgVk,1078
- dt_extensions_sdk-1.1.19.dist-info/RECORD,,
+ dt_extensions_sdk-1.1.20.dist-info/METADATA,sha256=TSHy4wLRnheyQx1cHs9GaOmaIDsqMZ5PxZ2GNoAbu_w,2794
+ dt_extensions_sdk-1.1.20.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
+ dt_extensions_sdk-1.1.20.dist-info/entry_points.txt,sha256=pweyOCgENGHjOlT6_kXYaBPOrE3p18K0UettqnNlnoE,55
+ dt_extensions_sdk-1.1.20.dist-info/licenses/LICENSE.txt,sha256=3Zihv0lOVYHNfDkJC-tUAU6euP9r2NexsDW4w-zqgVk,1078
+ dt_extensions_sdk-1.1.20.dist-info/RECORD,,
@@ -2,4 +2,4 @@
  #
  # SPDX-License-Identifier: MIT

- __version__ = "1.1.19"
+ __version__ = "1.1.20"
@@ -6,25 +6,25 @@ from __future__ import annotations

  import json
  import logging
- import random
  import sys
- import time
  from abc import ABC, abstractmethod
- from collections import deque
  from dataclasses import dataclass
  from enum import Enum
- from itertools import islice
  from pathlib import Path
- from typing import Any, Dict, Iterable, List, TypeVar, Union
+ from typing import Any, Generator, List, Sequence, TypeVar, Union

  from .vendor.mureq.mureq import HTTPException, Response, request

  CONTENT_TYPE_JSON = "application/json;charset=utf-8"
  CONTENT_TYPE_PLAIN = "text/plain;charset=utf-8"
  COUNT_METRIC_ITEMS_DICT = TypeVar("COUNT_METRIC_ITEMS_DICT", str, List[str])
+
+ # TODO - I believe these can be adjusted via RuntimeConfig, they can't be constants
  MAX_MINT_LINES_PER_REQUEST = 1000
  MAX_LOG_EVENTS_PER_REQUEST = 50_000
- MAX_LOG_REQUEST_SIZE = 5_000_000
+ MAX_LOG_REQUEST_SIZE = 5_000_000 # actually 5_242_880
+ MAX_METRIC_REQUEST_SIZE = 1_000_000 # actually 1_048_576
+
  HTTP_BAD_REQUEST = 400

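A side note on the size constants touched here: the figures quoted in the new comments are the binary-prefix values, 5 MiB and 1 MiB, and the constants used for batching stay a few percent below them. A quick illustrative check (not part of the package):

    # 5 MiB and 1 MiB in bytes, matching the "actually 5_242_880" / "actually 1_048_576" comments
    assert 5 * 1024 ** 2 == 5_242_880
    assert 1024 ** 2 == 1_048_576
    # The batching constants leave headroom below those values
    assert 5_000_000 < 5_242_880 and 1_000_000 < 1_048_576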
@@ -264,22 +264,13 @@ class HttpClient(CommunicationClient):
  return self.send_status(Status())

  def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
- total_lines = len(mint_lines)
- lines_sent = 0
-
- self.logger.debug(f"Start sending {total_lines} metrics to the EEC")
  responses = []

- # We divide into chunks of MAX_MINT_LINES_PER_REQUEST lines to avoid hitting the body size limit
- chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
-
- for chunk in chunks:
- lines_in_chunk = len(chunk)
- lines_sent += lines_in_chunk
- self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")
- mint_data = "\n".join(chunk).encode("utf-8")
+ # We divide into batches of MAX_METRIC_REQUEST_SIZE bytes to avoid hitting the body size limit
+ batches = divide_into_batches(mint_lines, MAX_METRIC_REQUEST_SIZE, "\n")
+ for batch in batches:
  response = self._make_request(
- self._metric_url, "POST", mint_data, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
+ self._metric_url, "POST", batch, extra_headers={"Content-Type": CONTENT_TYPE_PLAIN}
  ).json()
  self.logger.debug(f"{self._metric_url}: {response}")
  mint_response = MintResponse.from_json(response)
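In practical terms, this hunk replaces count-based chunking (at most MAX_MINT_LINES_PER_REQUEST lines per POST) with size-based batching: the new helper newline-joins the metric lines and yields payloads that are already encoded and bounded by MAX_METRIC_REQUEST_SIZE bytes. A minimal usage sketch, assuming the 1.1.20 module layout shown in this diff (the metric lines are made up):

    from dynatrace_extension.sdk.communication import (
        MAX_METRIC_REQUEST_SIZE,
        divide_into_batches,
    )

    mint_lines = [f"my.metric,team=demo gauge,{i}" for i in range(3)]

    # Each batch is a ready-to-POST bytes body of at most MAX_METRIC_REQUEST_SIZE bytes
    for body in divide_into_batches(mint_lines, MAX_METRIC_REQUEST_SIZE, "\n"):
        print(len(body), body)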
@@ -290,15 +281,16 @@
  self.logger.debug(f"Sending log events: {events}")

  responses = []
- batches = divide_logs_into_batches([events] if isinstance(events, dict) else events)
+ if isinstance(events, dict):
+ events = [events]
+ batches = divide_into_batches(events, MAX_LOG_REQUEST_SIZE)

  for batch in batches:
  try:
- encoded_batch = json.dumps(batch).encode("utf-8")
  eec_response = self._make_request(
  self._events_url,
  "POST",
- encoded_batch,
+ batch,
  extra_headers={"Content-Type": CONTENT_TYPE_JSON, "eec-enrichment": str(eec_enrichment).lower()},
  ).json()
  responses.append(eec_response)
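The log-event path now goes through the same helper: the dict-to-list normalization happens inline, and the explicit json.dumps(batch).encode("utf-8") step disappears because the helper already yields encoded bytes. A minimal sketch of the new call shape, assuming the 1.1.20 module layout shown in this diff (the event payload is made up):

    from dynatrace_extension.sdk.communication import MAX_LOG_REQUEST_SIZE, divide_into_batches

    events = {"content": "a single log event", "severity": "WARN"}  # dict or list[dict]
    if isinstance(events, dict):
        events = [events]

    # Each batch is an encoded payload of at most MAX_LOG_REQUEST_SIZE bytes
    for batch in divide_into_batches(events, MAX_LOG_REQUEST_SIZE):
        print(len(batch))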
@@ -399,24 +391,17 @@ class DebugClient(CommunicationClient):

  def send_metrics(self, mint_lines: list[str]) -> list[MintResponse]:
  total_lines = len(mint_lines)
- lines_sent = 0
-
  self.logger.info(f"Start sending {total_lines} metrics to the EEC")

  responses = []

- chunks = divide_into_chunks(mint_lines, MAX_MINT_LINES_PER_REQUEST)
- for chunk in chunks:
- lines_in_chunk = len(chunk)
- lines_sent += lines_in_chunk
- self.logger.debug(f"Sending chunk with {lines_in_chunk} metric lines. ({lines_sent}/{total_lines})")
-
+ batches = divide_into_batches(mint_lines, MAX_METRIC_REQUEST_SIZE)
+ for batch in batches:
  if self.local_ingest:
- mint_data = "\n".join(chunk).encode("utf-8")
  response = request(
  "POST",
  f"http://localhost:{self.local_ingest_port}/metrics/ingest",
- body=mint_data,
+ body=batch,
  headers={"Content-Type": CONTENT_TYPE_PLAIN},
  ).json()
  mint_response = MintResponse.from_json(response)
@@ -426,15 +411,13 @@ class DebugClient(CommunicationClient):
  for line in mint_lines:
  self.logger.info(f"send_metric: {line}")

- response = MintResponse(lines_invalid=0, lines_ok=len(chunk), error=None, warnings=None)
- responses.append(response)
  return responses

  def send_events(self, events: dict | list[dict], eec_enrichment: bool = True) -> list[dict | None]:
  self.logger.info(f"send_events (enrichment = {eec_enrichment}): {len(events)} events")
  if self.print_metrics:
  for event in events:
- self.logger.info(f"sendf_event: {event}")
+ self.logger.info(f"send_event: {event}")
  return []

  def send_sfm_metrics(self, mint_lines: list[str]) -> MintResponse:
@@ -446,60 +429,34 @@
  return 0


- def divide_into_chunks(iterable: Iterable, chunk_size: int) -> Iterable:
+ def divide_into_batches(items: Sequence[dict | str], max_size_bytes: int, join_with: str | None = None) -> Generator[bytes, None, None]:
  """
- Yield successive n-sized chunks from iterable.
- Example: _chunk([1, 2, 3, 4, 5, 6, 7, 8, 9], 3) -> [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
+ Yield successive batches from a list, according to sizing limitations

- :param iterable: The iterable to chunk
- :param chunk_size: The size of the chunks
- """
- iterator = iter(iterable)
- while True:
- subset = list(islice(iterator, chunk_size))
- if not subset:
- return
- yield subset
-
- def divide_logs_into_batches(logs: list[dict]):
+ :param items: The list items to divide, they myst be encodable to bytes
+ :param max_size_bytes: The maximum size of the payload in bytes
+ :param join_with: A string to join the items with before encoding
+ :return: A generator of batches of log events already encoded
  """
- Yield successive batches from a list of log events, according to sizing limitations
- imposed by the EEC: 5 MB payload, 50,000 events

- :param logs: The list of log events
- """
- events_left = len(logs)
- events = deque(logs)
-
- batch = []
- batch_size = 0
- batch_items = 0
-
- while events_left > 0:
- if batch_items == MAX_LOG_EVENTS_PER_REQUEST:
- yield batch
- batch = []
- batch_size = 0
- batch_items = 0
- continue
-
- event = events.popleft()
- events_left -= 1
-
- if event is not None:
- event_size = len(event)
-
- if batch_size + event_size >= MAX_LOG_REQUEST_SIZE:
- yield batch
- batch = [event]
- batch_size = event_size
- batch_items = 1
- else:
- batch.append(event)
- batch_size += event_size
- batch_items += 1
- else:
- yield batch
+ if not items:
+ return
+
+ if join_with is not None:
+ items = join_with.join(items)
+ encoded = f"{items}".encode(errors="replace")
+ size = len(encoded)
+ if size <= max_size_bytes:
+ yield encoded
+ return
+
+ # if we get here, the payload is too large, split it in half until we have chunks that are small enough
+ half = len(items) // 2
+ first_half = items[:half]
+ second_half = items[half:]
+ yield from divide_into_batches(first_half, max_size_bytes)
+ yield from divide_into_batches(second_half, max_size_bytes)
+

  @dataclass
  class MintResponse:
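To see the splitting strategy of the new helper in isolation (illustrative only; the byte limit is deliberately tiny): once the encoded payload exceeds max_size_bytes, the helper recursively halves its input (the joined string here, or the item list when join_with is omitted) until every yielded batch fits under the limit.

    from dynatrace_extension.sdk.communication import divide_into_batches

    lines = [f"my.metric,dim=a gauge,{i}" for i in range(100)]

    # Roughly 2.4 KB once joined, forced through a 256-byte limit: the joined string
    # is halved repeatedly (a split can fall mid-line, since halving operates on the
    # joined string), and every yielded batch is bytes of at most 256 bytes.
    batches = list(divide_into_batches(lines, max_size_bytes=256, join_with="\n"))
    assert all(len(batch) <= 256 for batch in batches)
    print(f"{len(lines)} lines -> {len(batches)} batches")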