apitally 0.13.0__py3-none-any.whl → 0.14.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,14 +4,17 @@ import asyncio
 import logging
 import random
 import time
+from contextlib import suppress
 from functools import partial
-from typing import Any, Dict, Optional, Tuple
+from typing import Any, AsyncIterator, Dict, Optional, Tuple
+from uuid import UUID

 import backoff
 import httpx

-from apitally.client.base import MAX_QUEUE_TIME, REQUEST_TIMEOUT, ApitallyClientBase
+from apitally.client.client_base import MAX_QUEUE_TIME, REQUEST_TIMEOUT, ApitallyClientBase
 from apitally.client.logging import get_logger
+from apitally.client.request_logging import RequestLoggingConfig


 logger = get_logger(__name__)
@@ -26,8 +29,8 @@ retry = partial(


 class ApitallyClient(ApitallyClientBase):
-    def __init__(self, client_id: str, env: str) -> None:
-        super().__init__(client_id=client_id, env=env)
+    def __init__(self, client_id: str, env: str, request_logging_config: Optional[RequestLoggingConfig] = None) -> None:
+        super().__init__(client_id=client_id, env=env, request_logging_config=request_logging_config)
         self._stop_sync_loop = False
         self._sync_loop_task: Optional[asyncio.Task] = None
         self._sync_data_queue: asyncio.Queue[Tuple[float, Dict[str, Any]]] = asyncio.Queue()
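Both client variants gain the same optional request_logging_config parameter (the threaded client further down receives the identical change). A minimal sketch of how it might be passed, assuming RequestLoggingConfig can be constructed with defaults (its fields are not part of this diff) and with ApitallyClient imported from the relevant client module shown here:

    from apitally.client.request_logging import RequestLoggingConfig

    # Hypothetical wiring, for illustration only; the framework integrations
    # normally construct the client themselves.
    client = ApitallyClient(
        client_id="00000000-0000-4000-8000-000000000000",  # placeholder UUID
        env="dev",
        request_logging_config=RequestLoggingConfig(),  # assumption: defaults are usable
    )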
@@ -41,20 +44,27 @@ class ApitallyClient(ApitallyClientBase):
         self._sync_loop_task = asyncio.create_task(self._run_sync_loop())

     async def _run_sync_loop(self) -> None:
-        first_iteration = True
+        last_sync_time = 0.0
         while not self._stop_sync_loop:
             try:
-                time_start = time.perf_counter()
-                async with self.get_http_client() as client:
-                    tasks = [self.send_sync_data(client)]
-                    if not self._startup_data_sent and not first_iteration:
-                        tasks.append(self.send_startup_data(client))
-                    await asyncio.gather(*tasks)
-                time_elapsed = time.perf_counter() - time_start
-                await asyncio.sleep(self.sync_interval - time_elapsed)
+                self.request_logger.write_to_file()
             except Exception:  # pragma: no cover
-                logger.exception("An error occurred during sync with Apitally hub")
-                first_iteration = False
+                logger.exception("An error occurred while writing request logs")
+
+            now = time.time()
+            if (now - last_sync_time) >= self.sync_interval:
+                try:
+                    async with self.get_http_client() as client:
+                        tasks = [self.send_sync_data(client), self.send_log_data(client)]
+                        if not self._startup_data_sent and last_sync_time > 0:  # not on first sync
+                            tasks.append(self.send_startup_data(client))
+                        await asyncio.gather(*tasks)
+                    last_sync_time = now
+                except Exception:  # pragma: no cover
+                    logger.exception("An error occurred during sync with Apitally hub")
+
+            self.request_logger.maintain()
+            await asyncio.sleep(1)

     def stop_sync_loop(self) -> None:
         self._stop_sync_loop = True
@@ -65,6 +75,7 @@ class ApitallyClient(ApitallyClientBase):
         # Send any remaining data before exiting
         async with self.get_http_client() as client:
             await self.send_sync_data(client)
+            await self.send_log_data(client)

     def set_startup_data(self, data: Dict[str, Any]) -> None:
         self._startup_data_sent = False
@@ -99,10 +110,27 @@ class ApitallyClient(ApitallyClientBase):
             finally:
                 self._sync_data_queue.task_done()

+    async def send_log_data(self, client: httpx.AsyncClient) -> None:
+        self.request_logger.rotate_file()
+        i = 0
+        while log_file := self.request_logger.get_file():
+            if i > 0:
+                time.sleep(random.uniform(0.1, 0.3))
+            try:
+                stream = log_file.stream_lines_compressed()
+                await self._send_log_data(client, log_file.uuid, stream)
+                log_file.delete()
+            except httpx.HTTPError:
+                self.request_logger.retry_file_later(log_file)
+                break
+            i += 1
+            if i >= 10:
+                break
+
     @retry(raise_on_giveup=False)
     async def _send_startup_data(self, client: httpx.AsyncClient, data: Dict[str, Any]) -> None:
         logger.debug("Sending startup data to Apitally hub")
-        response = await client.post(url="/startup", json=data, timeout=REQUEST_TIMEOUT)
+        response = await client.post(url="/startup", json=data)
         self._handle_hub_response(response)
         self._startup_data_sent = True
         self._startup_data = None
@@ -113,6 +141,17 @@ class ApitallyClient(ApitallyClientBase):
         response = await client.post(url="/sync", json=data)
         self._handle_hub_response(response)

+    async def _send_log_data(self, client: httpx.AsyncClient, uuid: UUID, stream: AsyncIterator[bytes]) -> None:
+        logger.debug("Streaming request log data to Apitally hub")
+        response = await client.post(url=f"{self.hub_url}/log?uuid={uuid}", content=stream)
+        if response.status_code == 402 and "Retry-After" in response.headers:
+            with suppress(ValueError):
+                retry_after = int(response.headers["Retry-After"])
+                self.request_logger.suspend_until = time.time() + retry_after
+                self.request_logger.clear()
+                return
+        self._handle_hub_response(response)
+
     def _handle_hub_response(self, response: httpx.Response) -> None:
         if response.status_code == 404:
             self.stop_sync_loop()
@@ -0,0 +1,97 @@
+from __future__ import annotations
+
+import os
+import re
+import threading
+import time
+from abc import ABC
+from typing import Any, Dict, Optional, Type, TypeVar, cast
+from uuid import UUID, uuid4
+
+from apitally.client.consumers import ConsumerRegistry
+from apitally.client.logging import get_logger
+from apitally.client.request_logging import RequestLogger, RequestLoggingConfig
+from apitally.client.requests import RequestCounter
+from apitally.client.server_errors import ServerErrorCounter
+from apitally.client.validation_errors import ValidationErrorCounter
+
+
+logger = get_logger(__name__)
+
+HUB_BASE_URL = os.getenv("APITALLY_HUB_BASE_URL") or "https://hub.apitally.io"
+HUB_VERSION = "v2"
+REQUEST_TIMEOUT = 10
+MAX_QUEUE_TIME = 3600
+SYNC_INTERVAL = 60
+INITIAL_SYNC_INTERVAL = 10
+INITIAL_SYNC_INTERVAL_DURATION = 3600
+
+TApitallyClient = TypeVar("TApitallyClient", bound="ApitallyClientBase")
+
+
+class ApitallyClientBase(ABC):
+    _instance: Optional[ApitallyClientBase] = None
+    _lock = threading.Lock()
+
+    def __new__(cls: Type[TApitallyClient], *args, **kwargs) -> TApitallyClient:
+        if cls._instance is None:
+            with cls._lock:
+                if cls._instance is None:
+                    cls._instance = super().__new__(cls)
+        return cast(TApitallyClient, cls._instance)
+
+    def __init__(self, client_id: str, env: str, request_logging_config: Optional[RequestLoggingConfig] = None) -> None:
+        if hasattr(self, "client_id"):
+            raise RuntimeError("Apitally client is already initialized")  # pragma: no cover
+        try:
+            UUID(client_id)
+        except ValueError:
+            raise ValueError(f"invalid client_id '{client_id}' (expecting hexadecimal UUID format)")
+        if re.match(r"^[\w-]{1,32}$", env) is None:
+            raise ValueError(f"invalid env '{env}' (expecting 1-32 alphanumeric lowercase characters and hyphens only)")
+
+        self.client_id = client_id
+        self.env = env
+        self.instance_uuid = str(uuid4())
+        self.request_counter = RequestCounter()
+        self.validation_error_counter = ValidationErrorCounter()
+        self.server_error_counter = ServerErrorCounter()
+        self.consumer_registry = ConsumerRegistry()
+        self.request_logger = RequestLogger(request_logging_config)
+
+        self._startup_data: Optional[Dict[str, Any]] = None
+        self._startup_data_sent = False
+        self._started_at = time.time()
+
+    @classmethod
+    def get_instance(cls: Type[TApitallyClient]) -> TApitallyClient:
+        if cls._instance is None:
+            raise RuntimeError("Apitally client not initialized")  # pragma: no cover
+        return cast(TApitallyClient, cls._instance)
+
+    @property
+    def sync_interval(self) -> float:
+        return (
+            SYNC_INTERVAL if time.time() - self._started_at > INITIAL_SYNC_INTERVAL_DURATION else INITIAL_SYNC_INTERVAL
+        )
+
+    @property
+    def hub_url(self) -> str:
+        return f"{HUB_BASE_URL}/{HUB_VERSION}/{self.client_id}/{self.env}"
+
+    def add_uuids_to_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
+        data_with_uuids = {
+            "instance_uuid": self.instance_uuid,
+            "message_uuid": str(uuid4()),
+        }
+        data_with_uuids.update(data)
+        return data_with_uuids
+
+    def get_sync_data(self) -> Dict[str, Any]:
+        data = {
+            "requests": self.request_counter.get_and_reset_requests(),
+            "validation_errors": self.validation_error_counter.get_and_reset_validation_errors(),
+            "server_errors": self.server_error_counter.get_and_reset_server_errors(),
+            "consumers": self.consumer_registry.get_and_reset_updated_consumers(),
+        }
+        return self.add_uuids_to_data(data)
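The new base module consolidates the singleton plumbing, hub URL construction, and sync payload assembly shared by both concrete clients. A rough illustration of how those pieces fit together, using a hypothetical subclass purely to exercise the base class (the client ID is a placeholder; real code uses the asyncio or threaded client):

    from apitally.client.client_base import ApitallyClientBase

    class IllustrationClient(ApitallyClientBase):
        """Hypothetical subclass, only for demonstrating the base class."""

    client = IllustrationClient(client_id="00000000-0000-4000-8000-000000000000", env="dev")
    assert IllustrationClient.get_instance() is client  # double-checked locking singleton
    print(client.hub_url)  # https://hub.apitally.io/v2/<client_id>/dev
    print(sorted(client.get_sync_data()))
    # ['consumers', 'instance_uuid', 'message_uuid', 'requests', 'server_errors', 'validation_errors']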
@@ -1,18 +1,22 @@
 from __future__ import annotations

 import logging
-import queue
 import random
 import time
+from contextlib import suppress
 from functools import partial
+from io import BufferedReader
+from queue import Queue
 from threading import Event, Thread
 from typing import Any, Callable, Dict, Optional, Tuple
+from uuid import UUID

 import backoff
 import requests

-from apitally.client.base import MAX_QUEUE_TIME, REQUEST_TIMEOUT, ApitallyClientBase
+from apitally.client.client_base import MAX_QUEUE_TIME, REQUEST_TIMEOUT, ApitallyClientBase
 from apitally.client.logging import get_logger
+from apitally.client.request_logging import RequestLoggingConfig


 logger = get_logger(__name__)
@@ -43,11 +47,11 @@ except NameError:


 class ApitallyClient(ApitallyClientBase):
-    def __init__(self, client_id: str, env: str) -> None:
-        super().__init__(client_id=client_id, env=env)
+    def __init__(self, client_id: str, env: str, request_logging_config: Optional[RequestLoggingConfig] = None) -> None:
+        super().__init__(client_id=client_id, env=env, request_logging_config=request_logging_config)
         self._thread: Optional[Thread] = None
         self._stop_sync_loop = Event()
-        self._sync_data_queue: queue.Queue[Tuple[float, Dict[str, Any]]] = queue.Queue()
+        self._sync_data_queue: Queue[Tuple[float, Dict[str, Any]]] = Queue()

     def start_sync_loop(self) -> None:
         self._stop_sync_loop.clear()
@@ -61,20 +65,29 @@ class ApitallyClient(ApitallyClientBase):
             last_sync_time = 0.0
             while not self._stop_sync_loop.is_set():
                 try:
-                    now = time.time()
-                    if (now - last_sync_time) >= self.sync_interval:
+                    self.request_logger.write_to_file()
+                except Exception:  # pragma: no cover
+                    logger.exception("An error occurred while writing request logs")
+
+                now = time.time()
+                if (now - last_sync_time) >= self.sync_interval:
+                    try:
                         with requests.Session() as session:
                             if not self._startup_data_sent and last_sync_time > 0:  # not on first sync
                                 self.send_startup_data(session)
                             self.send_sync_data(session)
+                            self.send_log_data(session)
                         last_sync_time = now
-                    time.sleep(1)
-                except Exception:  # pragma: no cover
-                    logger.exception("An error occurred during sync with Apitally hub")
+                    except Exception:  # pragma: no cover
+                        logger.exception("An error occurred during sync with Apitally hub")
+
+                self.request_logger.maintain()
+                time.sleep(1)
         finally:
             # Send any remaining data before exiting
             with requests.Session() as session:
                 self.send_sync_data(session)
+                self.send_log_data(session)

     def stop_sync_loop(self) -> None:
         self._stop_sync_loop.set()
@@ -112,6 +125,23 @@ class ApitallyClient(ApitallyClientBase):
             finally:
                 self._sync_data_queue.task_done()

+    def send_log_data(self, session: requests.Session) -> None:
+        self.request_logger.rotate_file()
+        i = 0
+        while log_file := self.request_logger.get_file():
+            if i > 0:
+                time.sleep(random.uniform(0.1, 0.3))
+            try:
+                with log_file.open_compressed() as fp:
+                    self._send_log_data(session, log_file.uuid, fp)
+                log_file.delete()
+            except requests.RequestException:
+                self.request_logger.retry_file_later(log_file)
+                break
+            i += 1
+            if i >= 10:
+                break
+
     @retry(raise_on_giveup=False)
     def _send_startup_data(self, session: requests.Session, data: Dict[str, Any]) -> None:
         logger.debug("Sending startup data to Apitally hub")
@@ -126,6 +156,17 @@ class ApitallyClient(ApitallyClientBase):
         response = session.post(url=f"{self.hub_url}/sync", json=data, timeout=REQUEST_TIMEOUT)
         self._handle_hub_response(response)

+    def _send_log_data(self, session: requests.Session, uuid: UUID, fp: BufferedReader) -> None:
+        logger.debug("Streaming request log data to Apitally hub")
+        response = session.post(url=f"{self.hub_url}/log?uuid={uuid}", data=fp, timeout=REQUEST_TIMEOUT)
+        if response.status_code == 402 and "Retry-After" in response.headers:
+            with suppress(ValueError):
+                retry_after = int(response.headers["Retry-After"])
+                self.request_logger.suspend_until = time.time() + retry_after
+                self.request_logger.clear()
+                return
+        self._handle_hub_response(response)
+
     def _handle_hub_response(self, response: requests.Response) -> None:
         if response.status_code == 404:
             self.stop_sync_loop()
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+import threading
+from typing import Any, Dict, List, Optional, Set, Union
+
+
+class Consumer:
+    def __init__(self, identifier: str, name: Optional[str] = None, group: Optional[str] = None) -> None:
+        self.identifier = str(identifier).strip()[:128]
+        self.name = str(name).strip()[:64] if name else None
+        self.group = str(group).strip()[:64] if group else None
+
+    @classmethod
+    def from_string_or_object(cls, consumer: Optional[Union[str, Consumer]]) -> Optional[Consumer]:
+        if not consumer:
+            return None
+        if isinstance(consumer, Consumer):
+            return consumer
+        consumer = str(consumer).strip()
+        if not consumer:
+            return None
+        return cls(identifier=consumer)
+
+    def update(self, name: str | None = None, group: str | None = None) -> bool:
+        name = str(name).strip()[:64] if name else None
+        group = str(group).strip()[:64] if group else None
+        updated = False
+        if name and name != self.name:
+            self.name = name
+            updated = True
+        if group and group != self.group:
+            self.group = group
+            updated = True
+        return updated
+
+
+class ConsumerRegistry:
+    def __init__(self) -> None:
+        self.consumers: Dict[str, Consumer] = {}
+        self.updated: Set[str] = set()
+        self._lock = threading.Lock()
+
+    def add_or_update_consumer(self, consumer: Optional[Consumer]) -> None:
+        if not consumer or (not consumer.name and not consumer.group):
+            return  # Only register consumers with name or group set
+        with self._lock:
+            if consumer.identifier not in self.consumers:
+                self.consumers[consumer.identifier] = consumer
+                self.updated.add(consumer.identifier)
+            elif self.consumers[consumer.identifier].update(name=consumer.name, group=consumer.group):
+                self.updated.add(consumer.identifier)
+
+    def get_and_reset_updated_consumers(self) -> List[Dict[str, Any]]:
+        data: List[Dict[str, Any]] = []
+        with self._lock:
+            for identifier in self.updated:
+                if consumer := self.consumers.get(identifier):
+                    data.append(
+                        {
+                            "identifier": consumer.identifier,
+                            "name": str(consumer.name)[:64] if consumer.name else None,
+                            "group": str(consumer.group)[:64] if consumer.group else None,
+                        }
+                    )
+            self.updated.clear()
+        return data
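A short usage sketch for the new consumer registry, grounded in the classes above (identifiers and names are illustrative only):

    from apitally.client.consumers import Consumer, ConsumerRegistry

    registry = ConsumerRegistry()

    # Consumers without a name or group are ignored by the registry.
    registry.add_or_update_consumer(Consumer.from_string_or_object("anonymous"))

    # Consumers with a name and/or group are tracked and reported once per change.
    registry.add_or_update_consumer(Consumer("acme", name="ACME Inc.", group="Customers"))
    print(registry.get_and_reset_updated_consumers())
    # [{'identifier': 'acme', 'name': 'ACME Inc.', 'group': 'Customers'}]

    # Re-registering an unchanged consumer does not mark it as updated again.
    registry.add_or_update_consumer(Consumer("acme", name="ACME Inc.", group="Customers"))
    print(registry.get_and_reset_updated_consumers())  # []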