reach_commons 0.18.40-py3-none-any.whl → 0.18.41-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,17 +1,17 @@
- import logging
- import os
-
-
- def init_logger(name: str):
-     logging.basicConfig(
-         level=getattr(logging, os.getenv("LOG_LEVEL", "INFO").upper(), logging.INFO),
-         format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-         force=True,
-     )
-     for noisy in ("botocore", "boto3", "urllib3"):
-         logging.getLogger(noisy).setLevel(logging.WARNING)
-     return logging.getLogger(name)
-
-
- def log_with_event(level_fn, msg, event):
-     level_fn(f"{msg} | event={event}")
+ import logging
+ import os
+
+
+ def init_logger(name: str):
+     logging.basicConfig(
+         level=getattr(logging, os.getenv("LOG_LEVEL", "INFO").upper(), logging.INFO),
+         format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+         force=True,
+     )
+     for noisy in ("botocore", "boto3", "urllib3"):
+         logging.getLogger(noisy).setLevel(logging.WARNING)
+     return logging.getLogger(name)
+
+
+ def log_with_event(level_fn, msg, event):
+     level_fn(f"{msg} | event={event}")
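The hunk above re-adds the logging helpers with identical visible content; per the RECORD section below, only the file's hash and size change. A minimal usage sketch, assuming the import path reach_commons.app_logging.logging_utils listed in the RECORD:

    # Usage sketch only, not part of the packaged file; the import path is taken
    # from the RECORD entry reach_commons/app_logging/logging_utils.py.
    from reach_commons.app_logging.logging_utils import init_logger, log_with_event

    logger = init_logger(__name__)  # honors LOG_LEVEL (default INFO) and silences botocore/boto3/urllib3
    log_with_event(logger.info, "secret refreshed", event={"source": "secrets-manager"})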
@@ -1,3 +1,3 @@
- from reach_commons.reach_aws.db_config import get_secret
-
- __all__ = ["get_secret"]
+ from reach_commons.reach_aws.db_config import get_secret
+
+ __all__ = ["get_secret"]
@@ -1,111 +1,90 @@
- import base64
- import json
- import os
- from typing import Any, Dict
-
- import boto3
- from botocore.exceptions import ClientError
-
- from reach_commons.redis_manager import RedisManager
-
- ENV = os.environ.get("ENV", "Staging")
-
-
- def _get_secret_json(secret_arn: str, region_name: str = "us-east-1") -> Dict[str, Any]:
-     """Fetch and parse a JSON secret from AWS Secrets Manager."""
-     session = boto3.Session(region_name=region_name)
-     client = session.client("secretsmanager")
-
-     try:
-         response = client.get_secret_value(SecretId=secret_arn)
-     except ClientError as exc:
-         raise RuntimeError(
-             f"Failed to fetch secret from AWS Secrets Manager: secret_arn={secret_arn}"
-         ) from exc
-
-     secret_string = _extract_secret_string(response, secret_arn)
-     try:
-         return json.loads(secret_string)
-     except json.JSONDecodeError as exc:
-         raise ValueError(
-             f"Secret value is not valid JSON: secret_arn={secret_arn}"
-         ) from exc
-
-
- def _extract_secret_string(response: Dict[str, Any], secret_arn: str) -> str:
-     if response.get("SecretBinary"):
-         decoded = base64.b64decode(response["SecretBinary"])
-         return decoded.decode("utf-8")
-     secret_string = response.get("SecretString")
-     if not secret_string:
-         raise ValueError(
-             f"Secret did not contain SecretString or SecretBinary: secret_arn={secret_arn}"
-         )
-     return secret_string
-
-
- def get_secret(
-     secret_arn: str,
-     region_name: str = "us-east-1",
-     host=os.getenv("MYSQL_HOST"),
-     db_name=os.getenv("MYSQL_DB_NAME"),
-     cachemanager: RedisManager = None,
- ) -> Dict[str, Any]:
-     """
-     Load DB credentials from AWS Secrets Manager and host from SSM Parameter Store.
-
-     Example:
-     # from reach_commons.reach_aws import get_secret
-     # config = get_secret(
-     # os.environ[ "RDS_SECRET_ARN"],
-     #)
-     """
-
-     if not secret_arn:
-         raise ValueError(f"RDS secret ARN is not configured")
-     if not host:
-         raise ValueError(f"RDS host is not configured")
-
-     secrets_data = None
-     cache_key = None
-     if cachemanager is not None:
-         cache_key = f"reach_commons:db_secret:{secret_arn}:{region_name}"
-         cached_secret = cachemanager.get(cache_key)
-         if cached_secret:
-             if isinstance(cached_secret, bytes):
-                 cached_secret = cached_secret.decode("utf-8")
-             try:
-                 secrets_data = json.loads(cached_secret)
-             except json.JSONDecodeError:
-                 secrets_data = None
-
-     if secrets_data is None:
-         secrets_data = _get_secret_json(secret_arn, region_name)
-         if cachemanager is not None and cache_key is not None:
-             try:
-                 cachemanager.add(
-                     cache_key, json.dumps(secrets_data), expire_seconds=3600
-                 )
-             except Exception:
-                 pass
-
-     if not isinstance(secrets_data, dict):
-         raise ValueError(
-             f"Secret payload must be a JSON object: secret_arn={secret_arn}"
-         )
-
-     secrets_data["host"] = host
-     secrets_data["dbname"] = db_name
-
-     missing = [
-         key
-         for key in ("host", "username", "password", "dbname")
-         if key not in secrets_data
-     ]
-     if missing:
-         raise ValueError(
-             "Secret is missing required fields: "
-             f"missing={missing}, secret_arn={secret_arn}"
-         )
-
-     return secrets_data
+ import base64
+ import json
+ import os
+ from typing import Any, Dict
+
+ import boto3
+ from botocore.exceptions import ClientError
+
+ ENV = os.environ.get("ENV", "Staging")
+
+
+ def _get_secret_json(secret_arn: str, region_name: str = "us-east-1") -> Dict[str, Any]:
+     """Fetch and parse a JSON secret from AWS Secrets Manager."""
+     session = boto3.Session(region_name=region_name)
+     client = session.client("secretsmanager")
+
+     try:
+         response = client.get_secret_value(SecretId=secret_arn)
+     except ClientError as exc:
+         raise RuntimeError(
+             f"Failed to fetch secret from AWS Secrets Manager: secret_arn={secret_arn}"
+         ) from exc
+
+     secret_string = _extract_secret_string(response, secret_arn)
+     try:
+         return json.loads(secret_string)
+     except json.JSONDecodeError as exc:
+         raise ValueError(
+             f"Secret value is not valid JSON: secret_arn={secret_arn}"
+         ) from exc
+
+
+ def _extract_secret_string(response: Dict[str, Any], secret_arn: str) -> str:
+     if response.get("SecretBinary"):
+         decoded = base64.b64decode(response["SecretBinary"])
+         return decoded.decode("utf-8")
+     secret_string = response.get("SecretString")
+     if not secret_string:
+         raise ValueError(
+             f"Secret did not contain SecretString or SecretBinary: secret_arn={secret_arn}"
+         )
+     return secret_string
+
+
+ def get_secret(
+     secret_arn: str,
+     region_name: str = "us-east-1",
+     host=os.getenv("MYSQL_HOST"),
+     db_name=os.getenv("MYSQL_DB_NAME"),
+ ) -> Dict[str, Any]:
+     """
+     Load DB credentials from AWS Secrets Manager and host from SSM Parameter Store.
+
+     Example:
+     # from reach_commons.reach_aws import get_secret
+     # config = get_secret(
+     # os.environ[ "RDS_SECRET_ARN"],
+     #)
+     """
+
+     if not secret_arn:
+         raise ValueError(f"RDS secret ARN is not configured")
+     if not host:
+         raise ValueError(f"RDS host is not configured")
+
+     secrets_data = None
+
+     if secrets_data is None:
+         secrets_data = _get_secret_json(secret_arn, region_name)
+
+     if not isinstance(secrets_data, dict):
+         raise ValueError(
+             f"Secret payload must be a JSON object: secret_arn={secret_arn}"
+         )
+
+     secrets_data["host"] = host
+     secrets_data["dbname"] = db_name
+
+     missing = [
+         key
+         for key in ("host", "username", "password", "dbname")
+         if key not in secrets_data
+     ]
+     if missing:
+         raise ValueError(
+             "Secret is missing required fields: "
+             f"missing={missing}, secret_arn={secret_arn}"
+         )
+
+     return secrets_data
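The functional change in this hunk is the removal of the RedisManager import, the cachemanager parameter, and the Redis-backed caching of the secret payload, so get_secret in 0.18.41 always reads from Secrets Manager. A minimal usage sketch against the new signature (the environment variable names follow the defaults above; the values they hold are assumed to be set by the caller):

    # Usage sketch only, not part of the packaged file; values are illustrative.
    import os

    from reach_commons.reach_aws.db_config import get_secret

    config = get_secret(
        secret_arn=os.environ["RDS_SECRET_ARN"],   # Secrets Manager ARN, as in the docstring example
        region_name="us-east-1",
        host=os.environ["MYSQL_HOST"],             # passed explicitly instead of relying on the import-time default
        db_name=os.environ["MYSQL_DB_NAME"],
    )
    # config is the parsed secret plus "host"/"dbname"; it will contain host,
    # username, password and dbname, otherwise get_secret raises ValueError.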
@@ -1,242 +1,242 @@
- import random
- import time
- from dataclasses import dataclass
- from typing import Optional, Tuple
-
- from reach_commons.redis_manager import RedisManager
-
- # Atomic fixed-window limiter (safe under high concurrency).
- _LUA_WINDOW_LIMITER = """
- -- KEYS[1] = window_counter_key
- -- ARGV[1] = tokens_to_consume
- -- ARGV[2] = ttl_seconds
- -- ARGV[3] = limit_per_window
-
- local tokens = tonumber(ARGV[1])
- local ttl = tonumber(ARGV[2])
- local limit = tonumber(ARGV[3])
-
- local current = redis.call('INCRBY', KEYS[1], tokens)
-
- -- If this is the first increment for this window, set TTL
- if current == tokens then
-     redis.call('EXPIRE', KEYS[1], ttl)
- end
-
- if current <= limit then
-     return 1
- else
-     return 0
- end
- """
-
-
- @dataclass(frozen=True)
- class AcquireResult:
-     allowed: bool
-     retry_after_seconds: int
-
-
- class ReachRateLimiter:
-     """
-     ReachRateLimiter (fixed-window limiter) backed by Redis.
-
-     Configurable live via Redis (no redeploy needed).
-     Atomic under heavy concurrency (Lua runs inside Redis).
-     Returns retry_after_seconds (use it to ChangeMessageVisibility / Delay).
-
-     Redis keys used:
-     - Config hash:
-         {key_prefix}:cfg:{bucket_key}
-       Fields (all optional):
-         - limit_per_window (int)
-         - interval_seconds (int)
-         - jitter_seconds (int)
-
-     - Per-window counter:
-         {key_prefix}:{bucket_key}:{window_start}
-
-     Suggested defaults:
-         interval_seconds=2
-         limit_per_window=2000 (=> ~1000/s)
-         jitter_seconds=2 or 3
-     """
-
-     def __init__(
-         self,
-         redis_manager: RedisManager,
-         bucket_key: str,
-         key_prefix: str = "rate_limiter",
-         default_limit_per_window: int = 2000,
-         default_interval_seconds: int = 2,
-         default_jitter_seconds: Optional[int] = None,
-         # Cache config in-memory per Lambda container (to reduce Redis reads):
-         config_cache_seconds: int = 2,
-         # if Redis is down, deny by default to avoid stampede downstream
-         deny_on_redis_error: bool = True,
-     ):
-         self.redis = redis_manager
-         self.bucket_key = bucket_key
-         self.key_prefix = key_prefix
-
-         self.default_limit = int(default_limit_per_window)
-         self.default_interval = int(default_interval_seconds)
-         self.default_jitter = (
-             int(default_jitter_seconds)
-             if default_jitter_seconds is not None
-             else int(default_interval_seconds)
-         )
-
-         self.config_cache_seconds = max(0, int(config_cache_seconds))
-         self.deny_on_redis_error = bool(deny_on_redis_error)
-
-         self._lua = _LUA_WINDOW_LIMITER
-
-         # Per-container cache (each Lambda container caches for a short time)
-         self._cached_cfg: Optional[Tuple[int, int, int]] = None
-         self._cached_cfg_ts: float = 0.0
-
-     # -------------------------
-     # Redis key helpers
-     # -------------------------
-     def _cfg_key(self) -> str:
-         return f"{self.key_prefix}:cfg:{self.bucket_key}"
-
-     def _counter_key(self, window_start: int) -> str:
-         return f"{self.key_prefix}:{self.bucket_key}:{window_start}"
-
-     # -------------------------
-     # Time helpers
-     # -------------------------
-     def _now(self) -> float:
-         return time.time()
-
-     def _window_start(self, now: float, interval_seconds: int) -> int:
-         return int(now // interval_seconds) * interval_seconds
-
-     # -------------------------
-     # Config loading (from Redis hash)
-     # -------------------------
-     @staticmethod
-     def _parse_int(value, fallback: int) -> int:
-         try:
-             if value is None:
-                 return fallback
-             if isinstance(value, (bytes, bytearray)):
-                 value = value.decode("utf-8", errors="ignore")
-             return int(value)
-         except Exception:
-             return fallback
-
-     def _load_config(self) -> Tuple[int, int, int]:
-         """
-         Loads config from Redis hash:
-             limit_per_window, interval_seconds, jitter_seconds
-
-         Behavior:
-         - If config exists in Redis: read only (never overwrite).
-         - If config does NOT exist yet: seed Redis ONCE with defaults (so you can edit live).
-         - If Redis is unavailable: fallback to defaults (no writes).
-         """
-         now = self._now()
-
-         if (
-             self._cached_cfg is not None
-             and self.config_cache_seconds > 0
-             and (now - self._cached_cfg_ts) < self.config_cache_seconds
-         ):
-             return self._cached_cfg
-
-         limit = self.default_limit
-         interval = self.default_interval
-         jitter = self.default_jitter
-
-         try:
-             cfg_key = self._cfg_key()
-             raw = self.redis.hgetall(cfg_key) or {}
-
-             # If config was never created, seed it once with defaults
-             if not raw:
-                 rc = self.redis.redis_connection
-                 rc.hsetnx(cfg_key, "limit_per_window", str(self.default_limit))
-                 rc.hsetnx(cfg_key, "interval_seconds", str(self.default_interval))
-                 rc.hsetnx(cfg_key, "jitter_seconds", str(self.default_jitter))
-
-                 # Re-read after seeding (so we now depend on Redis config)
-                 raw = self.redis.hgetall(cfg_key) or {}
-
-             # raw typically has bytes keys/values
-             limit = self._parse_int(
-                 raw.get(b"limit_per_window") or raw.get("limit_per_window"), limit
-             )
-             interval = self._parse_int(
-                 raw.get(b"interval_seconds") or raw.get("interval_seconds"), interval
-             )
-             jitter = self._parse_int(
-                 raw.get(b"jitter_seconds") or raw.get("jitter_seconds"), jitter
-             )
-
-             # If someone puts garbage in Redis
-             if limit <= 0:
-                 limit = self.default_limit
-             if interval <= 0:
-                 interval = self.default_interval
-             if jitter < 0:
-                 jitter = 0
-
-         except Exception:
-             # Redis issue: keep defaults
-             limit = self.default_limit
-             interval = self.default_interval
-             jitter = self.default_jitter
-
-         cfg = (int(limit), int(interval), int(jitter))
-         self._cached_cfg = cfg
-         self._cached_cfg_ts = now
-         return cfg
-
-     # -------------------------
-     # Public API
-     # -------------------------
-     def acquire(self, tokens: int = 1) -> AcquireResult:
-         """
-         Attempt to acquire tokens (default 1).
-         If denied, returns retry_after_seconds.
-         """
-         tokens = int(tokens)
-         if tokens <= 0:
-             return AcquireResult(allowed=True, retry_after_seconds=0)
-
-         now = self._now()
-         limit, interval, jitter_max = self._load_config()
-
-         window_start = self._window_start(now, interval)
-         window_end = window_start + interval
-         counter_key = self._counter_key(window_start)
-
-         # TTL slightly larger than interval so old window keys expire
-         ttl_seconds = max(interval * 2, 5)
-
-         try:
-             allowed = self.redis.eval(
-                 self._lua,
-                 numkeys=1,
-                 keys=[counter_key],
-                 args=[str(tokens), str(ttl_seconds), str(limit)],
-             )
-         except Exception:
-             if self.deny_on_redis_error:
-                 # safest for protecting downstream (Mongo/API)
-                 retry_after = int(max(1.0, float(interval)))
-                 return AcquireResult(allowed=False, retry_after_seconds=retry_after)
-             return AcquireResult(allowed=True, retry_after_seconds=0)
-
-         if allowed == 1:
-             return AcquireResult(allowed=True, retry_after_seconds=0)
-
-         # Denied: retry after next window + jitter to avoid waves
-         base = max(0.0, window_end - now)
-         jitter = random.uniform(0.0, float(jitter_max))
-         retry_after = int(max(1.0, base + jitter))
-
-         return AcquireResult(allowed=False, retry_after_seconds=retry_after)
+ import random
+ import time
+ from dataclasses import dataclass
+ from typing import Optional, Tuple
+
+ from reach_commons.redis_manager import RedisManager
+
+ # Atomic fixed-window limiter (safe under high concurrency).
+ _LUA_WINDOW_LIMITER = """
+ -- KEYS[1] = window_counter_key
+ -- ARGV[1] = tokens_to_consume
+ -- ARGV[2] = ttl_seconds
+ -- ARGV[3] = limit_per_window
+
+ local tokens = tonumber(ARGV[1])
+ local ttl = tonumber(ARGV[2])
+ local limit = tonumber(ARGV[3])
+
+ local current = redis.call('INCRBY', KEYS[1], tokens)
+
+ -- If this is the first increment for this window, set TTL
+ if current == tokens then
+     redis.call('EXPIRE', KEYS[1], ttl)
+ end
+
+ if current <= limit then
+     return 1
+ else
+     return 0
+ end
+ """
+
+
+ @dataclass(frozen=True)
+ class AcquireResult:
+     allowed: bool
+     retry_after_seconds: int
+
+
+ class ReachRateLimiter:
+     """
+     ReachRateLimiter (fixed-window limiter) backed by Redis.
+
+     Configurable live via Redis (no redeploy needed).
+     Atomic under heavy concurrency (Lua runs inside Redis).
+     Returns retry_after_seconds (use it to ChangeMessageVisibility / Delay).
+
+     Redis keys used:
+     - Config hash:
+         {key_prefix}:cfg:{bucket_key}
+       Fields (all optional):
+         - limit_per_window (int)
+         - interval_seconds (int)
+         - jitter_seconds (int)
+
+     - Per-window counter:
+         {key_prefix}:{bucket_key}:{window_start}
+
+     Suggested defaults:
+         interval_seconds=2
+         limit_per_window=2000 (=> ~1000/s)
+         jitter_seconds=2 or 3
+     """
+
+     def __init__(
+         self,
+         redis_manager: RedisManager,
+         bucket_key: str,
+         key_prefix: str = "rate_limiter",
+         default_limit_per_window: int = 2000,
+         default_interval_seconds: int = 2,
+         default_jitter_seconds: Optional[int] = None,
+         # Cache config in-memory per Lambda container (to reduce Redis reads):
+         config_cache_seconds: int = 2,
+         # if Redis is down, deny by default to avoid stampede downstream
+         deny_on_redis_error: bool = True,
+     ):
+         self.redis = redis_manager
+         self.bucket_key = bucket_key
+         self.key_prefix = key_prefix
+
+         self.default_limit = int(default_limit_per_window)
+         self.default_interval = int(default_interval_seconds)
+         self.default_jitter = (
+             int(default_jitter_seconds)
+             if default_jitter_seconds is not None
+             else int(default_interval_seconds)
+         )
+
+         self.config_cache_seconds = max(0, int(config_cache_seconds))
+         self.deny_on_redis_error = bool(deny_on_redis_error)
+
+         self._lua = _LUA_WINDOW_LIMITER
+
+         # Per-container cache (each Lambda container caches for a short time)
+         self._cached_cfg: Optional[Tuple[int, int, int]] = None
+         self._cached_cfg_ts: float = 0.0
+
+     # -------------------------
+     # Redis key helpers
+     # -------------------------
+     def _cfg_key(self) -> str:
+         return f"{self.key_prefix}:cfg:{self.bucket_key}"
+
+     def _counter_key(self, window_start: int) -> str:
+         return f"{self.key_prefix}:{self.bucket_key}:{window_start}"
+
+     # -------------------------
+     # Time helpers
+     # -------------------------
+     def _now(self) -> float:
+         return time.time()
+
+     def _window_start(self, now: float, interval_seconds: int) -> int:
+         return int(now // interval_seconds) * interval_seconds
+
+     # -------------------------
+     # Config loading (from Redis hash)
+     # -------------------------
+     @staticmethod
+     def _parse_int(value, fallback: int) -> int:
+         try:
+             if value is None:
+                 return fallback
+             if isinstance(value, (bytes, bytearray)):
+                 value = value.decode("utf-8", errors="ignore")
+             return int(value)
+         except Exception:
+             return fallback
+
+     def _load_config(self) -> Tuple[int, int, int]:
+         """
+         Loads config from Redis hash:
+             limit_per_window, interval_seconds, jitter_seconds
+
+         Behavior:
+         - If config exists in Redis: read only (never overwrite).
+         - If config does NOT exist yet: seed Redis ONCE with defaults (so you can edit live).
+         - If Redis is unavailable: fallback to defaults (no writes).
+         """
+         now = self._now()
+
+         if (
+             self._cached_cfg is not None
+             and self.config_cache_seconds > 0
+             and (now - self._cached_cfg_ts) < self.config_cache_seconds
+         ):
+             return self._cached_cfg
+
+         limit = self.default_limit
+         interval = self.default_interval
+         jitter = self.default_jitter
+
+         try:
+             cfg_key = self._cfg_key()
+             raw = self.redis.hgetall(cfg_key) or {}
+
+             # If config was never created, seed it once with defaults
+             if not raw:
+                 rc = self.redis.redis_connection
+                 rc.hsetnx(cfg_key, "limit_per_window", str(self.default_limit))
+                 rc.hsetnx(cfg_key, "interval_seconds", str(self.default_interval))
+                 rc.hsetnx(cfg_key, "jitter_seconds", str(self.default_jitter))
+
+                 # Re-read after seeding (so we now depend on Redis config)
+                 raw = self.redis.hgetall(cfg_key) or {}
+
+             # raw typically has bytes keys/values
+             limit = self._parse_int(
+                 raw.get(b"limit_per_window") or raw.get("limit_per_window"), limit
+             )
+             interval = self._parse_int(
+                 raw.get(b"interval_seconds") or raw.get("interval_seconds"), interval
+             )
+             jitter = self._parse_int(
+                 raw.get(b"jitter_seconds") or raw.get("jitter_seconds"), jitter
+             )
+
+             # If someone puts garbage in Redis
+             if limit <= 0:
+                 limit = self.default_limit
+             if interval <= 0:
+                 interval = self.default_interval
+             if jitter < 0:
+                 jitter = 0
+
+         except Exception:
+             # Redis issue: keep defaults
+             limit = self.default_limit
+             interval = self.default_interval
+             jitter = self.default_jitter
+
+         cfg = (int(limit), int(interval), int(jitter))
+         self._cached_cfg = cfg
+         self._cached_cfg_ts = now
+         return cfg
+
+     # -------------------------
+     # Public API
+     # -------------------------
+     def acquire(self, tokens: int = 1) -> AcquireResult:
+         """
+         Attempt to acquire tokens (default 1).
+         If denied, returns retry_after_seconds.
+         """
+         tokens = int(tokens)
+         if tokens <= 0:
+             return AcquireResult(allowed=True, retry_after_seconds=0)
+
+         now = self._now()
+         limit, interval, jitter_max = self._load_config()
+
+         window_start = self._window_start(now, interval)
+         window_end = window_start + interval
+         counter_key = self._counter_key(window_start)
+
+         # TTL slightly larger than interval so old window keys expire
+         ttl_seconds = max(interval * 2, 5)
+
+         try:
+             allowed = self.redis.eval(
+                 self._lua,
+                 numkeys=1,
+                 keys=[counter_key],
+                 args=[str(tokens), str(ttl_seconds), str(limit)],
+             )
+         except Exception:
+             if self.deny_on_redis_error:
+                 # safest for protecting downstream (Mongo/API)
+                 retry_after = int(max(1.0, float(interval)))
+                 return AcquireResult(allowed=False, retry_after_seconds=retry_after)
+             return AcquireResult(allowed=True, retry_after_seconds=0)
+
+         if allowed == 1:
+             return AcquireResult(allowed=True, retry_after_seconds=0)
+
+         # Denied: retry after next window + jitter to avoid waves
+         base = max(0.0, window_end - now)
+         jitter = random.uniform(0.0, float(jitter_max))
+         retry_after = int(max(1.0, base + jitter))
+
+         return AcquireResult(allowed=False, retry_after_seconds=retry_after)
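The rate limiter module is re-added with no visible content change in this diff (only its hash and size move in the RECORD). A minimal usage sketch for the public acquire() API, assuming an already constructed RedisManager instance; the SQS delay wiring mentioned in the comment is illustrative, not part of the package:

    # Usage sketch only, not part of the packaged file. `redis_manager` is assumed to be
    # an existing reach_commons.redis_manager.RedisManager instance.
    from reach_commons.reach_aws.reach_rate_limiter import ReachRateLimiter

    def send_if_allowed(redis_manager, send_fn, payload):
        limiter = ReachRateLimiter(
            redis_manager=redis_manager,
            bucket_key="downstream-api",      # illustrative bucket name
            default_limit_per_window=2000,    # ~1000 req/s when interval_seconds=2
            default_interval_seconds=2,
        )
        result = limiter.acquire(tokens=1)
        if result.allowed:
            send_fn(payload)
            return 0
        # Denied: delay the message (e.g. SQS ChangeMessageVisibility) for this many seconds.
        return result.retry_after_seconds

In a Lambda, the limiter would normally be created once at module scope so the per-container config cache noted in __init__ actually takes effect.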
@@ -1,6 +1,6 @@
- Metadata-Version: 2.4
+ Metadata-Version: 2.1
  Name: reach_commons
- Version: 0.18.40
+ Version: 0.18.41
  Summary: Reach Commons is a versatile utility library designed to streamline and enhance development workflows within the Reach ecosystem.
  License: MIT
  Author: Engineering
@@ -14,8 +14,6 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
- Classifier: Programming Language :: Python :: 3.13
- Classifier: Programming Language :: Python :: 3.14
  Requires-Dist: curlify (==3.0.0)
  Requires-Dist: fastapi (>=0.115.5)
  Requires-Dist: pydantic (>=2.9.2)
@@ -4,7 +4,7 @@ reach_commons/app_logging/http_logger.py,sha256=mljQCdmsmtD2HsC_gsFwZAxPlAiLPYVi
  reach_commons/app_logging/log_deprecated_endpoints.py,sha256=yXs9Jh7V0_0cMnzwXV9WRgCdFXe_tybcFE1eQl2KNC4,2020
  reach_commons/app_logging/logger.py,sha256=Iq2XTl1zLgHDmVsTMdlFadcYJOqQNhBcFSscacKs_Xs,2295
  reach_commons/app_logging/logging_config.py,sha256=Y1JaZOoQBWgQjkOqYmeDRIm0p2eCOl3yTzgsgqyqm8I,1539
- reach_commons/app_logging/logging_utils.py,sha256=zeDRm3DugbuqMIDstTsVyH5xU_ZV-WAcCINxb6qYdJY,491
+ reach_commons/app_logging/logging_utils.py,sha256=-QQ4l9CTYs-lCL-VNxGdWvPquRQtrnXWqzNfxOLO9ys,508
  reach_commons/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reach_commons/clients/event_processor.py,sha256=KmYF0kuZxLmHQjJASXMr5jz2D_D3WNHB0c4QOlZo1_E,2024
  reach_commons/clients/hubspot.py,sha256=ntAzvwoaq78MkKaVoZ7geND-AafAzccNnJogfJDahVA,5497
@@ -15,14 +15,14 @@ reach_commons/mongo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
  reach_commons/mongo/customer_persistence.py,sha256=acrtpyCWr9vLVq61saJ3_Vp4DYHFBTM9XqoYC72J84w,3735
  reach_commons/mongo/customer_persistence_async.py,sha256=BmcP8TXyyQah-GYM3wcKi1baqSCycjw7UadlxGywyQM,3892
  reach_commons/mongo/validation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reach_commons/reach_aws/__init__.py,sha256=2b4eH12nXhkgrk_cI__xV9uD5OqK7yrirP7aHxUvUXE,83
+ reach_commons/reach_aws/__init__.py,sha256=xb97rt0lBd0wz9ZhULQ7YVAOceOS-AkvNVM4jfLOjYE,86
  reach_commons/reach_aws/commons.py,sha256=qQba0li75BIpmyVc0sDVrrxbtYvDCedF6RmFD-V4MYQ,259
- reach_commons/reach_aws/db_config.py,sha256=ORLL9vu8lHsOyGbE40HH4_QLtv6nQQAwJgURTk_huGc,3444
+ reach_commons/reach_aws/db_config.py,sha256=TIaZA-SspFuiIWYnYllg1-eAAISM7vBLXYqUj09HZLo,2713
  reach_commons/reach_aws/dynamo_db.py,sha256=BL3QcKzx4uZic-Ui12tln_GMSKe297FdfyIzFPE7veE,7140
  reach_commons/reach_aws/exceptions.py,sha256=x0RL5ktNtzxg0KykhEVWReBq_dEtciK6B2vMs_s4C9k,915
  reach_commons/reach_aws/firehose.py,sha256=1xFKLWMv3bNo3PPW5gtaL6NqzUDyVil6B768slj2wbY,5674
  reach_commons/reach_aws/kms.py,sha256=ZOfyJMQUgxJEojRoB7-aCxtATpNx1Ig522IUYH11NZ4,4678
- reach_commons/reach_aws/reach_rate_limiter.py,sha256=W0WY1WEupspwx0MvlO0F8dSzba86Ag_w-96VZ7E_l40,7959
+ reach_commons/reach_aws/reach_rate_limiter.py,sha256=G0An8cD0BDU7ZAuK3Xxjy2_fo7nL3ksf1LBHBlnX65s,8201
  reach_commons/reach_aws/s3.py,sha256=2MLlDNFx0SROJBpE_KjJefyrB7lMqTlrYuRhSZx4iKs,3945
  reach_commons/reach_aws/sqs.py,sha256=IKKWrd-qbhMMVYUvGbaq1ouVRdx-0u-SqwYaTcp0tWY,21645
  reach_commons/reach_base_model.py,sha256=vgdGDcZr3iXMmyRhmBhOf_LKWB_6QzT3r_Yiyo6OmEk,3009
@@ -30,6 +30,6 @@ reach_commons/redis_manager.py,sha256=yRed53ZKlbIb6rZnL53D1F_aB-xWT3nbeUP2cqYzho
  reach_commons/sms_smart_encoding.py,sha256=92y0RmZ0l4ONHpC9qeO5KfViSNq64yE2rc7lhNDSZqE,1241
  reach_commons/utils.py,sha256=dMgKIGqTgoSItuBI8oz81gKtW3qi21Jkljv9leS_V88,8475
  reach_commons/validations.py,sha256=x_lkrtlrCAJC_f7mZb19JjfKFbYlPFv-P84K_lbZyYs,1056
- reach_commons-0.18.40.dist-info/METADATA,sha256=OWzdxvGjULSBOrn4QTlyIpyuzKqs6-8TW1uhT7jQ23I,1965
- reach_commons-0.18.40.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
- reach_commons-0.18.40.dist-info/RECORD,,
+ reach_commons-0.18.41.dist-info/METADATA,sha256=RbAT9hck9WEqrlfCwigxRa1-JyL6AkcgnO0D5J-lcvw,1863
+ reach_commons-0.18.41.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+ reach_commons-0.18.41.dist-info/RECORD,,
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 2.2.1
+ Generator: poetry-core 1.8.1
  Root-Is-Purelib: true
  Tag: py3-none-any