balabs-kit 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
bakit/__init__.py ADDED
@@ -0,0 +1 @@
1
+ __version__ = "0.0.3"
@@ -0,0 +1,3 @@
1
+ from .app import create_base_app
2
+
3
+ __all__ = ["create_base_app"]
bakit/sanic/app.py ADDED
@@ -0,0 +1,53 @@
1
+ from pathlib import Path
2
+
3
+ from orjson import dumps
4
+ from sanic import Sanic
5
+ from sanic.response import text
6
+ from sanic_ext import Extend
7
+ from tortoise.contrib.sanic import register_tortoise
8
+
9
+ from bakit import settings
10
+ from bakit.sanic.listeners import setup_cache_listener, setup_sentry_listener
11
+ from bakit.sanic.middlewares import cache_middleware_request, cache_middleware_response
12
+ from bakit.settings import APP_NAME, LOGGING_CONFIG, TORTOISE_ORM
13
+ from bakit.utils.metrics import view_metrics
14
+
15
+ STATIC_DIR = Path(__file__).resolve().parent / "static"
16
+
17
+
18
def create_base_app(app_name=APP_NAME, log_config=LOGGING_CONFIG, is_testing=False):
    """Build a Sanic application pre-wired with the bakit conventions.

    Sets JSON error formatting, CORS config, the request/response cache
    middleware pair, cache and Sentry startup listeners, a /ping/ health
    endpoint and (outside of tests) Tortoise ORM registration.
    """
    application = Sanic(
        app_name, strict_slashes=True, log_config=log_config, dumps=dumps
    )

    application.config.FALLBACK_ERROR_FORMAT = "json"
    application.config.CACHE_MIDDLEWARE_ENABLED = settings.CACHE_MIDDLEWARE_ENABLED
    application.config.CORS_ORIGINS = settings.CORS_ORIGINS
    application.config.CORS_METHODS = settings.CORS_METHODS

    Extend(application)

    application.static("/favicon.ico", STATIC_DIR / "favicon.png")

    # Startup listeners: Redis cache pool and Sentry SDK initialisation.
    for listener in (setup_cache_listener, setup_sentry_listener):
        application.register_listener(listener, "before_server_start")

    # Cache middleware: short-circuit on request, store on response.
    application.register_middleware(cache_middleware_request, "request")
    application.register_middleware(cache_middleware_response, "response")

    # /ping/ endpoint is needed for load balancer health checks. Do not remove
    @application.route("/ping/", methods=["GET"])
    @view_metrics()
    async def health(request):
        return text("pong", status=200)

    # Tests provide their own ORM setup, so skip registration there.
    if not is_testing:
        register_tortoise(application, config=TORTOISE_ORM, generate_schemas=False)

    return application
@@ -0,0 +1,17 @@
1
def no_cache(handler):
    """Flag *handler* so the cache middleware never caches its responses.

    Works on plain view functions as well as class-based views
    (HTTPMethodView subclasses), where it disables caching for every
    HTTP method. The cache middleware checks for the ``disable_cache``
    marker and skips both reading from and writing to the cache for any
    matching route.

    Apply it to routes that must always produce fresh output or where a
    cached response would be incorrect.
    """
    setattr(handler, "disable_cache", True)
    return handler
@@ -0,0 +1,42 @@
1
+ import logging
2
+
3
+ import sentry_sdk
4
+ from aiocache import caches
5
+ from sentry_sdk.integrations.asyncio import AsyncioIntegration
6
+ from sentry_sdk.integrations.logging import LoggingIntegration
7
+
8
+ from bakit import settings
9
+
10
+ log = logging.getLogger(__name__)
11
+
12
+
13
async def setup_cache_listener(app):
    """before_server_start listener wiring up the Redis-backed cache.

    When the cache middleware is disabled, ``app.ctx.cache`` is set to
    ``None`` so downstream code can detect the missing cache.
    """
    if not app.config.CACHE_MIDDLEWARE_ENABLED:
        log.debug("Caching disabled")
        app.ctx.cache = None
        return

    redis_backend = {
        "cache": "aiocache.RedisCache",
        "endpoint": settings.REDIS_HOST,
        "port": settings.REDIS_PORT,
        "db": settings.REDIS_DB,
        "timeout": 3,
        "serializer": {"class": "aiocache.serializers.PickleSerializer"},
    }
    caches.set_config({"default": redis_backend})
    app.ctx.cache = caches.get("default")
32
+
33
+
34
async def setup_sentry_listener(_):
    """before_server_start listener initialising the Sentry SDK.

    A no-op when ``SENTRY_DSN`` is not configured.
    """
    dsn = settings.SENTRY_DSN
    if not dsn:
        return

    sentry_sdk.init(
        dsn=dsn,
        send_default_pii=True,
        integrations=[
            AsyncioIntegration(),
            LoggingIntegration(event_level="WARNING"),
        ],
    )
@@ -0,0 +1,98 @@
1
+ import hashlib
2
+
3
+ from sanic import response
4
+
5
+ from bakit import settings
6
+
7
+
8
def _build_cache_key(request):
    """
    Redis-safe cache key.

    Hashes method + path + query string (joined with "|") using SHA-256,
    which keeps keys short, deterministic and free of unsafe characters.
    """
    method = request.method or ""
    path = request.path or ""
    query = request.query_string or ""

    digest = hashlib.sha256(f"{method}|{path}|{query}".encode("utf-8")).hexdigest()
    return f"sanic-cache:{digest}"
25
+
26
+
27
def _is_no_cache(request):
    """Return True when the matched route opted out of caching via @no_cache.

    The marker may sit on the handler itself (function views) or on
    ``handler.view_class`` (HTTPMethodView subclasses).
    """
    handler = getattr(request.route, "handler", None)
    if handler is None:
        return False

    candidates = (handler, getattr(handler, "view_class", None))
    return any(getattr(candidate, "disable_cache", False) for candidate in candidates)
41
+
42
+
43
async def cache_middleware_request(request):
    """Request middleware: serve a cached response when one exists.

    Only GET requests on matched routes are considered; routes marked
    with @no_cache are skipped entirely. A cache hit marks the request
    via ``request.ctx.response_from_cache`` so the response middleware
    does not re-store it.
    """
    app = request.app
    eligible = (
        app.config.CACHE_MIDDLEWARE_ENABLED
        and request.method == "GET"
        and getattr(request, "route", None)
        and not _is_no_cache(request)
    )
    if not eligible:
        return

    cached = await app.ctx.cache.get(_build_cache_key(request))
    if cached is None:
        return

    # mark that this request is served from cache
    request.ctx.response_from_cache = True
    return response.raw(
        cached["body"],
        status=cached["status"],
        headers=cached["headers"],
        content_type=cached["content_type"],
    )
68
+
69
+
70
async def cache_middleware_response(request, response):
    """Response middleware: store successful GET responses in the cache.

    Skips responses that were themselves served from the cache, disabled
    caching, non-GET methods, unmatched routes, @no_cache routes and any
    response whose status is not 200.
    """
    # If response came from cache, do nothing
    if getattr(request.ctx, "response_from_cache", False):
        return

    app = request.app
    if not app.config.CACHE_MIDDLEWARE_ENABLED:
        return
    if request.method != "GET" or not getattr(request, "route", None):
        return
    if _is_no_cache(request):
        return
    if response.status != 200:
        return

    payload = {
        "body": response.body,  # bytes
        "status": response.status,
        "headers": list(response.headers.items()),
        "content_type": response.content_type,
    }
    await app.ctx.cache.set(
        _build_cache_key(request), payload, ttl=settings.CACHE_MIDDLEWARE_SECONDS
    )
Binary file
Binary file
bakit/sanic/views.py ADDED
@@ -0,0 +1,348 @@
1
+ import math
2
+ import re
3
+ from datetime import UTC, date, datetime, timedelta
4
+ from decimal import Decimal
5
+
6
+ from sanic import response
7
+ from sanic.views import HTTPMethodView
8
+
9
+ from bakit.utils.db import fetch_all_sql, fetch_one_sql
10
+ from bakit.utils.metrics import view_metrics_context
11
+
12
+
13
+ def serialize(obj):
14
+ """Recursively serialize objects for JSON serialization.
15
+ Currently handles datetime, date and Decimal objects.
16
+ """
17
+ if isinstance(obj, datetime | date):
18
+ return obj.isoformat()
19
+ elif isinstance(obj, Decimal):
20
+ # Avoid returning scientific notation for 0 values ("0E-18" -> "0")
21
+ if obj.is_zero():
22
+ return "0"
23
+ return str(obj)
24
+ elif isinstance(obj, dict):
25
+ return {key: serialize(value) for key, value in obj.items()}
26
+ elif isinstance(obj, list):
27
+ return [serialize(item) for item in obj]
28
+ else:
29
+ return obj
30
+
31
+
32
class BadRequestError(Exception):
    """Client-error exception carrying an HTTP status code (default 400)."""

    def __init__(self, message="Bad request", status_code=400):
        # Keep both attributes available for the APIView error handler.
        self.message = message
        self.status_code = status_code
        super().__init__(message)
39
+
40
+
41
def route(bp, path):
    """Decorator that registers a class-based view on blueprint *bp*.

    Enforces the project URL convention: paths start with "/" and,
    except for the root path, also end with "/".
    """
    if not path.startswith("/"):
        raise ValueError(f"Route path must start with '/': {path!r}")
    if path != "/" and not path.endswith("/"):
        raise ValueError(f"Route path must end with '/': {path!r}")

    def register(view_cls):
        bp.add_route(view_cls.as_view(), path)
        return view_cls

    return register
55
+
56
+
57
class Response:
    """Envelope for API payloads: data + HTTP status + optional message."""

    def __init__(self, data, status=200, message=None):
        self.data = data
        self.status = status
        self.message = message

    def to_dict(self):
        """Return the JSON-ready envelope; data is run through serialize()."""
        body = {
            "data": serialize(self.data),
            "status": self.status,
            # 2xx statuses count as success.
            "success": 200 <= self.status < 300,
        }
        if self.message:
            body["message"] = self.message
        return body
79
+
80
+
81
+ class DaysAgoMixin:
82
+ days_ago = None
83
+ days_ago_default = 30
84
+ days_ago_required = False
85
+ days_ago_options = [1, 7, 30, 90, 365, 9999]
86
+ days_ago_dt = None
87
+ days_ago_date = None
88
+
89
+ def _handle_days_ago(self, request):
90
+ days_ago = request.args.get("days_ago", self.days_ago_default)
91
+ if days_ago == "null" or days_ago is None or days_ago == "None":
92
+ days_ago = self.days_ago_default
93
+ if days_ago:
94
+ try:
95
+ self.days_ago = int(days_ago)
96
+ self.days_ago_dt = datetime.now(UTC) - timedelta(days=self.days_ago)
97
+ self.days_ago_date = datetime.now(UTC).date() - timedelta(
98
+ days=self.days_ago
99
+ )
100
+ except (TypeError, ValueError):
101
+ raise ValueError("Wrong value for days_ago") from None
102
+
103
+ if self.days_ago_options and self.days_ago not in self.days_ago_options:
104
+ raise ValueError("Wrong value for days_ago") from None
105
+ elif self.days_ago_required:
106
+ raise ValueError("days_ago is a required param") from None
107
+
108
+
109
class APIView(DaysAgoMixin, HTTPMethodView):
    """Base class-based view that normalises handler return values.

    Handlers may return a Response object, a (data, status) tuple, or a
    ready-made sanic response. Raw dicts/lists are rejected to force an
    explicit envelope. BadRequestError is translated into a JSON error
    payload with the matching status code.
    """

    async def dispatch_request(self, request, *args, **kwargs):
        self._handle_days_ago(request)

        method_handler = getattr(self, request.method.lower(), None)
        if method_handler is None:
            return response.json(
                {"message": "Method not allowed", "status": 405, "success": False},
                status=405,
            )

        try:
            outcome = await method_handler(request, *args, **kwargs)
        except BadRequestError as exc:
            return response.json(
                {"message": exc.message, "status": exc.status_code, "success": False},
                status=exc.status_code,
            )

        if isinstance(outcome, Response):
            return response.json(outcome.to_dict(), status=outcome.status)

        if isinstance(outcome, tuple):
            data, status = outcome
            if isinstance(data, dict | list):
                wrapped = Response(data=data, status=status)
                return response.json(wrapped.to_dict(), status=wrapped.status)
            return response.text(str(data), status=status)

        if isinstance(outcome, dict | list):
            raise TypeError(
                "APIView handler must return a Response object or a (data, status) "
                "tuple, not a raw dict or list."
            )

        return outcome
144
+
145
+
146
class PaginatedAPIView(APIView):
    """APIView that paginates raw-SQL results.

    Subclasses supply SQL through get_base_query() (and optionally
    get_count_query()), plus the whitelists `allowed_filters` and
    `sortable_fields`. Query parameters understood: page, limit, count,
    sort, and any whitelisted filter parameter.
    """

    # Whitelist of filterable fields or ready-made SQL filter clauses.
    allowed_filters = ()
    # Whitelist of fields usable in the `sort` query parameter.
    sortable_fields = ()
    # Sort applied when the request carries no `sort` parameter.
    default_sort = ""
    # When True, a COUNT query runs unless ?count=false is passed.
    include_count = True

    def get_allowed_filters(self):
        """Hook: return the filter whitelist (defaults to the class attribute)."""
        return self.allowed_filters

    def get_sortable_fields(self):
        """Hook: return the sortable-field whitelist."""
        return self.sortable_fields

    def get_base_query(self):
        """Hook: return (sql, values) for the base SELECT. Must be overridden."""
        raise NotImplementedError

    def get_count_query(self):
        """Hook: return a COUNT SQL string, or None to derive one from the base query."""
        return None

    def get_filters(self, request, values=None):
        """Build SQL filter clauses and bind values from the request.

        Returns (filters, values): `filters` is a list of SQL snippets
        later joined with AND; `values` maps placeholder names to
        parameter values.
        """
        filters = []
        if values is None:
            values = {}

        for filter_item in self.get_allowed_filters():
            # Handle both simple field names and custom filter clauses
            if (
                "=" in filter_item
                or ">" in filter_item
                or "<" in filter_item
                or "!=" in filter_item
                or "LIKE" in filter_item.upper()
                or "ILIKE" in filter_item.upper()
            ):
                # Custom filter clause with %(name)s pattern
                filter_clause = filter_item
                param_match = re.search(r"%\(([^)]+)\)s", filter_clause)
                if param_match:
                    param_name = param_match.group(1)
                    if param_name in request.args:
                        filters.append(filter_clause)
                        # Arg values may arrive as lists; take the first element.
                        if isinstance(request.args[param_name], list):
                            values[param_name] = request.args[param_name][0]
                        else:
                            values[param_name] = request.args[param_name]
                    # Path parameters (match_info) can also feed the clause.
                    if param_name in request.match_info:
                        filters.append(filter_clause)
                        values[param_name] = request.match_info[param_name]
            else:
                # Simple field name - default to equality
                field_name = filter_item
                # Extract the parameter name from the field
                # (e.g., "a.network" -> "network")
                param_name = field_name.split(".")[-1]
                if param_name in request.args:
                    filter_clause = f"{field_name} = %({param_name})s"
                    filters.append(filter_clause)
                    if isinstance(request.args[param_name], list):
                        values[param_name] = request.args[param_name][0]
                    else:
                        values[param_name] = request.args[param_name]

        return filters, values

    def get_sorting(self, request):
        """Translate the `sort` query parameter into an ORDER BY clause.

        Only whitelisted fields are honoured; a leading "-" means DESC.
        Returns "" when nothing matches.
        """
        sort_param = request.args.get("sort")
        if not sort_param and not self.default_sort:
            return ""

        sort_param = sort_param or self.default_sort

        allowed = self.get_sortable_fields()
        sort_parts = []

        for field in sort_param.split(","):
            direction = "ASC"
            if field.startswith("-"):
                field = field[1:]
                direction = "DESC"

            # Match either the bare field or a table-qualified whitelist entry.
            for allowed_field in allowed:
                if allowed_field.endswith(f".{field}") or allowed_field == field:
                    sort_parts.append(f"{allowed_field} {direction}")
                    break

        parts = f" ORDER BY {', '.join(sort_parts)}" if sort_parts else ""
        # NOTE(review): NULLS LAST is appended once after the whole clause,
        # so with multiple sort fields it only applies to the last one —
        # confirm this is intended.
        return f"{parts} NULLS LAST" if parts else ""

    async def paginate(self, request):
        """Run the filtered/sorted/paged query and return results + metadata.

        Raises BadRequestError for invalid page/limit parameters. Returns
        {"results": rows, "pagination": {...}}.
        """
        # Validate page parameter
        page_param = request.args.get("page", "1")
        try:
            page = int(page_param)
            if page < 1:
                raise BadRequestError("Page must be a positive integer")
        except ValueError as e:
            raise BadRequestError("Page must be a valid integer") from e

        # Validate limit parameter
        limit_param = request.args.get("limit", "20")
        try:
            limit = int(limit_param)
            if limit < 1:
                raise BadRequestError("Limit must be a positive integer")
        except ValueError as e:
            raise BadRequestError("Limit must be a valid integer") from e

        if limit > 1000:
            raise BadRequestError("Limit must be <= 1000")

        offset = (page - 1) * limit

        # Check if count is requested (default to True for backward compatibility)
        include_count = self.include_count
        if include_count is True:
            include_count = request.args.get("count", "true").lower() == "true"

        base_sql, values = self.get_base_query()
        count_sql = self.get_count_query()

        filters, filter_values = self.get_filters(request)

        values.update(filter_values)

        filter_clause = f" WHERE {' AND '.join(filters)}" if filters else ""
        sort_clause = self.get_sorting(request)

        # limit/offset are validated ints above, so interpolating them is safe.
        final_query = (
            f"{base_sql}{filter_clause}{sort_clause} LIMIT {limit} OFFSET {offset}"
        )

        rows = await fetch_all_sql(final_query, values)

        # Initialize pagination metadata
        pagination = {
            "page": page,
            "limit": limit,
            "total": None,
            "pages": None,
            "next": None,
            "previous": None,
        }

        # Only run count query if requested
        if include_count:
            if count_sql:
                final_count = f"{count_sql}{filter_clause}"
            else:
                # Wrap the filtered base query for counting
                final_count = (
                    f"SELECT COUNT(*) as count FROM ({base_sql}{filter_clause}) AS sub"
                )

            total_result = await fetch_one_sql(final_count, values)
            total = total_result["count"] if total_result else 0

            pagination.update(
                {
                    "total": total,
                    "pages": math.ceil(total / limit),
                }
            )

        # Add previous/next links
        base_url = request.url.split("?")[0]
        query_params = dict(request.args)

        # Convert list values to single values for URL generation
        clean_params = {}
        for k, v in query_params.items():
            if isinstance(v, list):
                clean_params[k] = v[0] if v else ""
            else:
                clean_params[k] = v

        def build_url(page_num):
            """Helper function to build pagination URLs"""
            params = clean_params.copy()
            params["page"] = page_num
            return f"{base_url}?{'&'.join(f'{k}={v}' for k, v in params.items())}"

        # Previous page link
        if page > 1:
            pagination["previous"] = build_url(page - 1)

        # Next page link (only if we have count info or if we got a full page of
        # results)
        if include_count:
            if page < pagination.get("pages", 0):
                pagination["next"] = build_url(page + 1)
        else:
            # If no count, check if we got a full page of results to determine
            # if there's a next page
            if len(rows) == limit:
                pagination["next"] = build_url(page + 1)

        return {
            "results": rows,
            "pagination": pagination,
        }

    async def get(self, request, *args, **kwargs):
        """GET handler: paginated results, wrapped by APIView as (data, 200)."""
        with view_metrics_context(self.__class__.__name__):
            return await self.paginate(request), 200
bakit/settings.py ADDED
@@ -0,0 +1,103 @@
1
+ import re
2
+ import sys
3
+
4
+ from environs import Env
5
+
6
+ env = Env()
7
+
8
# Application name; required environment variable.
APP_NAME = env("APP_NAME")

# CORS: local dev servers, Vercel preview deploys and blockanalitica.com
# (sub)domains are allowed origins.
CORS_ORIGINS = [
    re.compile(r"^http://(localhost|127\.0\.0\.1):\d+$"),
    re.compile(r"^https://(\S+\.)?vercel\.app$"),
    re.compile(r"^https://(\S+\.)?blockanalitica\.com$"),
]
CORS_METHODS = ["GET", "POST", "PUT", "DELETE", "OPTIONS"]


# Tortoise ORM: single default connection; the consuming application is
# expected to append its model modules to the empty "models" list.
TORTOISE_ORM = {
    "connections": {
        "default": env("APP_DB_URL"),
    },
    "apps": {
        "core": {
            "models": [],
            "default_connection": "default",
        },
    },
}


# Error reporting; an empty DSN disables Sentry (see setup_sentry_listener).
SENTRY_DSN = env("SENTRY_DSN", "")

# StatsD metrics endpoint; metric names are prefixed with the app name.
STATSD_HOST = env("STATSD_HOST", "")
STATSD_PORT = env("STATSD_PORT", default=8125)
STATSD_PREFIX = env("STATSD_PREFIX", default=APP_NAME)

# Redis connection used by the response-cache middleware.
REDIS_HOST = env("REDIS_HOST", "")
REDIS_PORT = env.int("REDIS_PORT", 6379)
REDIS_DB = env.int("REDIS_DB", 2)

# Response cache TTL (seconds) and on/off switch.
CACHE_MIDDLEWARE_SECONDS = 5
CACHE_MIDDLEWARE_ENABLED = env.bool("CACHE_MIDDLEWARE_ENABLED", False)

# Per-logger level knobs, each overridable via environment variables.
APP_LOG_LEVEL = env("APP_LOG_LEVEL", default="INFO")
TORTOISE_LOG_LEVEL = env("TORTOISE_LOG_LEVEL", default="WARNING")
DEFAULT_LOG_LEVEL = env("DEFAULT_LOG_LEVEL", default="WARNING")
ARQ_LOG_LEVEL = env("ARQ_LOG_LEVEL", default="INFO")
CHAIN_HARVESTER_LOG_LEVEL = env("CHAIN_HARVESTER_LOG_LEVEL", default="WARNING")

# Single console handler attached to the root logger; the named loggers
# propagate to it and only override the level.
LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "default": {
            "format": (
                "[%(asctime)s] %(name)s {%(module)s:%(lineno)d} "
                "PID=%(process)d [%(levelname)s] - %(message)s"
            ),
        },
    },
    "handlers": {
        "console": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "stream": sys.stdout,
            "formatter": "default",
        },
    },
    "loggers": {
        "bakit": {
            "propagate": True,
            "level": APP_LOG_LEVEL,
        },
        "core": {
            "propagate": True,
            "level": APP_LOG_LEVEL,
        },
        "tortoise": {
            "propagate": True,
            "level": TORTOISE_LOG_LEVEL,
        },
        "tortoise.db_client": {
            "propagate": True,
            "level": TORTOISE_LOG_LEVEL,
        },
        "arq": {
            "propagate": True,
            "level": ARQ_LOG_LEVEL,
        },
        "arq.worker": {
            "propagate": True,
            "level": ARQ_LOG_LEVEL,
        },
        "chain_harvester": {
            "propagate": True,
            "level": CHAIN_HARVESTER_LOG_LEVEL,
        },
        "": {
            "level": DEFAULT_LOG_LEVEL,
            "handlers": ["console"],
        },
    },
}
bakit/shell.py ADDED
@@ -0,0 +1,33 @@
1
+ # ruff: noqa: T100
2
+ import asyncio
3
+ import os
4
+
5
+ from IPython.terminal.embed import InteractiveShellEmbed
6
+ from tortoise import Tortoise
7
+
8
+
9
def start_ipython_shell(orm_config, extra_ns=None, banner=None):
    """Run an embedded IPython shell with Tortoise ORM initialised.

    - initialises Tortoise with *orm_config*
    - embeds an IPython shell (top-level await works there)
    - always closes DB connections when the shell exits

    Note: IPython is a hard requirement — it is imported unconditionally
    at module level, so there is no stdlib-shell fallback here.

    Args:
        orm_config: Tortoise ORM config dict passed to Tortoise.init().
        extra_ns: Optional mapping of extra names to expose in the shell.
        banner: Optional banner text shown when the shell starts.
    """
    asyncio.run(Tortoise.init(config=orm_config))

    if banner is None:
        banner = "Tortoise shell. If IPython is installed, top-level await should work."

    ns = {
        "Tortoise": Tortoise,
        "os": os,
    }
    if extra_ns:
        ns.update(extra_ns)

    try:
        shell = InteractiveShellEmbed(banner2=banner)
        shell(local_ns=ns, global_ns=ns)
    finally:
        # Close connections even if the shell raises.
        asyncio.run(Tortoise.close_connections())
File without changes
bakit/utils/db.py ADDED
@@ -0,0 +1,51 @@
1
+ import logging
2
+ from contextlib import asynccontextmanager
3
+
4
+ from tortoise.connection import connections
5
+
6
+ log = logging.getLogger(__name__)
7
+
8
+
9
def _convert_named_placeholders(sql, sql_vars):
    """Convert psycopg-style %(key)s placeholders to asyncpg's $1, $2, ...

    When *sql_vars* is a dict, each key whose placeholder actually occurs
    in *sql* is numbered in insertion order and its value collected into
    the positional parameter list. Keys with no matching placeholder are
    dropped — previously they still consumed a number and were passed as
    extra positional arguments, which makes asyncpg reject the query with
    an argument-count mismatch (e.g. when a count query uses only a
    subset of the filter values).

    Non-dict inputs pass through unchanged (None becomes an empty list).

    Returns:
        (sql, params): the rewritten SQL and a positional parameter list.
    """
    sql_vars = sql_vars or []
    if isinstance(sql_vars, dict):
        params = []
        for key, value in sql_vars.items():
            placeholder = f"%({key})s"
            if placeholder in sql:
                # Number after appending so repeated placeholders share one $n.
                params.append(value)
                sql = sql.replace(placeholder, f"${len(params)}")
        sql_vars = params
    return sql, sql_vars
19
+
20
+
21
async def fetch_one_sql(sql, sql_vars=None, db_alias="default"):
    """Run *sql* and return the first row as a dict ({} when no rows match)."""
    query, params = _convert_named_placeholders(sql, sql_vars)
    connection = connections.get(db_alias)
    rows = await connection.execute_query_dict(query, params)
    if not rows:
        return {}
    return rows[0]
26
+
27
+
28
async def fetch_all_sql(sql, sql_vars=None, db_alias="default"):
    """Run *sql* and return every row as a list of dicts."""
    query, params = _convert_named_placeholders(sql, sql_vars)
    connection = connections.get(db_alias)
    return await connection.execute_query_dict(query, params)
33
+
34
+
35
@asynccontextmanager
async def streaming_fetch_all_sql(
    sql, sql_vars=None, db_alias="default", prefetch=2000
):
    """Stream query results through a server-side cursor.

    Example usage:
        async with streaming_fetch_all_sql(sql) as cursor:
            async for record in cursor:
                print(record)
    """
    query, params = _convert_named_placeholders(sql, sql_vars or [])
    client = connections.get(db_alias)

    # Server-side cursors require an explicit transaction on the
    # acquired connection.
    async with client.acquire_connection() as raw_conn, raw_conn.transaction():
        yield raw_conn.cursor(query, *params, prefetch=prefetch)
bakit/utils/metrics.py ADDED
@@ -0,0 +1,263 @@
1
+ from contextlib import contextmanager
2
+ from functools import wraps
3
+
4
+ import statsd
5
+
6
+ from bakit import settings
7
+
8
# Module-wide StatsD client used by every helper below; host, port and
# metric prefix come from the environment-driven settings module.
statsd_client = statsd.StatsClient(
    settings.STATSD_HOST,
    settings.STATSD_PORT,
    settings.STATSD_PREFIX,
)
13
+
14
+
15
def multinetworktimerd(key):
    """
    Decorator function to set up a timer around a function call.
    This is a function only decorator!

    The decorated method's instance must expose a ``network`` attribute;
    the emitted metric key is ``<network>.<key>``.

    Example:
        >>> import time
        >>> @metrics.multinetworktimerd('eventprocessor.sync')
        >>> def sync(self):
        ...     time.sleep(1)

    When running:
        `EventProcessor(self.network, self.to_block).sync()
    it will generate the following key:
        - base.eventprocessor.sync
        - ethereum.eventprocessor.sync
    """

    def decorator(func):
        @wraps(func)
        def timed_call(*args, **kwargs):
            if not key:
                raise Exception("Using an empty key name")

            # First positional arg is `self` for instance methods; read
            # the network from it when present.
            instance = args[0] if args else None
            network = getattr(instance, "network", None) if instance else None

            if not network:
                raise AttributeError(
                    f"The decorated method '{func.__name__}' must have a 'network' "
                    "attribute. Ensure the class or instance has a 'network' property "
                    "defined."
                )

            with timer(f"{network}.{key}"):
                return func(*args, **kwargs)

        return timed_call

    return decorator
58
+
59
+
60
def timerd(key):
    """
    Decorator function to set up a timer around a function call.
    This is a function only decorator!

    Example:
        >>> import time
        >>> @metrics.timerd('time_sleep_key')
        >>> def timed_function():
        ...     time.sleep(1)
    """

    def decorator(func):
        @wraps(func)
        def timed_call(*args, **kwargs):
            # Validate at call time, matching multinetworktimerd.
            if not key:
                raise Exception("Using an empty key name")
            with timer(key):
                return func(*args, **kwargs)

        return timed_call

    return decorator
83
+
84
+
85
@contextmanager
def timer(key):
    """Metrics wrapper for Statsd Timer Object

    >>> import time
    >>> with metrics.timer('unique_key'):
    ...     time.sleep(1)
    """
    stopwatch = statsd_client.timer(str(key))
    stopwatch.start()
    try:
        yield
    finally:
        # Record the elapsed time even when the body raises.
        stopwatch.stop()
99
+
100
+
101
def raw_timer(key, value):
    """Send a timing directly to Graphite, without need to call start() and stop().

    :keyword value: The time in seconds, it must be an int or a float

    >>> # Got a timing from frontend!
    >>> metrics.raw_timer('unique_key', 31.3)
    """
    # Silently ignore non-numeric values rather than raising.
    if not isinstance(value, int | float):
        return None
    return statsd_client.timing(str(key), value)
115
+
116
+
117
def increment(key, delta=1, subname=None):
    """Increment the counter identified with `key` and `subname` with `delta`

    >>> # After a user logs in....
    >>> metrics.increment('auth.successful_login', 1)

    :keyword delta: The delta to add to the counter, default is 1
    :keyword subname: The subname to report the data to (appended to the
        client name). Like "hits", or "sales".
    """
    name = f"counters.{key}" if subname is None else f"counters.{key}.{subname}"
    return statsd_client.incr(name, delta)
132
+
133
+
134
def decrement(key, delta=1, subname=None):
    """Decrement the counter identified with `key` and `subname` with `delta`

    >>> # Users that log out...
    >>> metrics.decrement('auth.connected_users', 1)

    :keyword delta: The delta to substract from the counter, default is 1
    :keyword subname: The subname to report the data to (appended to the
        client name)
    """
    name = f"counters.{key}" if subname is None else f"counters.{key}.{subname}"
    return statsd_client.decr(name, delta)
150
+
151
+
152
def gauge(key, value=1, subname=None):
    """Set the value of the gauge identified with `key` and `subname` with `value`

    :keyword value: The value to set the gauge at, default is 1
    :keyword subname: The subname to report the data to (appended to the
        client name)
    """
    name = f"{key}.{subname}" if subname else key

    # We never use the relative changes behaviour so attempt to always make it
    # do the set value behaviour instead: zero the gauge before sending a
    # negative value.
    if value < 0:
        statsd_client.gauge(name, 0)
    return statsd_client.gauge(name, value)
169
+
170
+
171
def function_long_name(func, extra=None):
    """Return a dotted metric name: "<module>.<function_name>[.<extra>]"."""
    segments = [func.__module__, func.__name__]
    if extra:
        segments.append(extra)
    return ".".join(segments)
176
+
177
+
178
def auto_named_statsd_timer(function_to_decorate):
    """Wrap a function so each call bumps a ``.call`` counter and is timed
    under a ``.time`` key, both derived from the function's module and name.
    """
    call_name = function_long_name(function_to_decorate, "call")
    timer_name = function_long_name(function_to_decorate, "time")

    @wraps(function_to_decorate)
    def counted_call(*args, **kwargs):
        statsd_client.incr(call_name)
        return function_to_decorate(*args, **kwargs)

    # The statsd timer decorator wraps the counting wrapper, so a single
    # call records both metrics.
    return statsd_client.timer(timer_name)(counted_call)
190
+
191
+
192
@contextmanager
def view_metrics_context(endpoint_name=None, instance=None):
    """
    Context manager for view methods to measure hit count and response time.

    Args:
        endpoint_name: Optional custom endpoint name. When omitted, the
            metric name is derived from *instance*'s class (or "unknown").
        instance: The view instance (self) used to derive the class name.

    Example:
        async def get(self, request):
            with view_metrics_context(instance=self):
                return Response({"data": result})

        with view_metrics_context("custom_endpoint"):
            pass
    """
    if endpoint_name:
        metric_base = f"views.{endpoint_name}"
    elif instance:
        metric_base = f"views.{instance.__class__.__name__}.context"
    else:
        metric_base = "views.unknown.context"

    statsd_client.incr(f"{metric_base}.hits")
    with statsd_client.timer(f"{metric_base}.response_time"):
        yield
225
+
226
+
227
def view_metrics(endpoint_name=None):
    """
    Decorator for async view methods: counts hits and times the response.

    Args:
        endpoint_name: Optional custom endpoint name. When omitted, the
            metric name is "views.<ClassName>.<method_name>" derived from
            the bound instance and the wrapped function.

    Example:
        @view_metrics()
        async def get(self, request):
            ...

        @view_metrics("custom_endpoint")
        async def post(self, request):
            ...
    """

    def decorator(func):
        @wraps(func)
        async def measured(*args, **kwargs):
            if endpoint_name:
                metric_base = f"views.{endpoint_name}"
            else:
                owner = args[0].__class__.__name__ if args else "unknown"
                metric_base = f"views.{owner}.{func.__name__}"

            statsd_client.incr(f"{metric_base}.hits")

            # Time the awaited handler execution.
            with statsd_client.timer(f"{metric_base}.response_time"):
                return await func(*args, **kwargs)

        return measured

    return decorator
@@ -0,0 +1,55 @@
1
def generate_kpi_card_changes(column_name, usd_change=False, percentage_change=False):
    """
    Generate SQL snippets for KPI change / percentage-change columns.

    Produces SELECT-list fragments (aliases `c.` = current, `p.` = previous)
    for:
    - the current value
    - the absolute change (current - previous)
    - the percentage change (when percentage_change is True)
    - the same set for the `_usd` column (when usd_change is True)

    Args:
        column_name (str): Name of the column to generate changes for.
        usd_change (bool, optional): Include the `_usd` calculations.
        percentage_change (bool, optional): Include percentage changes.

    Returns:
        str: SQL fragment string.

    Example:
        >>> generate_kpi_card_changes('total_supply')
        # Returns SQL for total_supply change columns
    """
    snippets = [
        f"""
    , c.{column_name}
    , c.{column_name} - COALESCE(p.{column_name}, 0) AS {column_name}_change
    """
    ]

    if percentage_change:
        # Denominator falls back from previous to current value; both zero
        # yields NULL instead of a division error.
        snippets.append(
            f"""
    , (
        (c.{column_name} - COALESCE(p.{column_name}, 0))
        / COALESCE(NULLIF(p.{column_name}, 0), NULLIF(c.{column_name}, 0))
    ) AS {column_name}_change_percentage
    """
        )

    if usd_change:
        snippets.append(
            f"""
    , c.{column_name}_usd
    , c.{column_name}_usd - COALESCE(p.{column_name}_usd, 0)
        AS {column_name}_usd_change
    """
        )
        if percentage_change:
            snippets.append(
                f"""
    , (
        (c.{column_name}_usd - COALESCE(p.{column_name}_usd, 0))
        / COALESCE(NULLIF(p.{column_name}_usd, 0), NULLIF(
            c.{column_name}_usd, 0))
    ) AS {column_name}_usd_change_percentage
    """
            )

    return "".join(snippets)
@@ -0,0 +1,30 @@
1
+ Metadata-Version: 2.4
2
+ Name: balabs-kit
3
+ Version: 0.0.3
4
+ Author-email: tsifrer <3967564+tsifrer@users.noreply.github.com>
5
+ License-Expression: Apache-2.0
6
+ Requires-Python: >=3.13
7
+ Requires-Dist: aiocache[redis]>=0.12.3
8
+ Requires-Dist: asyncclick>=8.3.0.7
9
+ Requires-Dist: asyncpg>=0.31.0
10
+ Requires-Dist: environs>=14.5.0
11
+ Requires-Dist: ipython>=9.8.0
12
+ Requires-Dist: nest-asyncio>=1.6.0
13
+ Requires-Dist: orjson>=3.11.5
14
+ Requires-Dist: sentry-sdk>=2.47.0
15
+ Requires-Dist: statsd>=4.0.1
16
+ Requires-Dist: tortoise-orm>=0.25.1
17
+ Requires-Dist: uvloop>=0.22.1
18
+ Provides-Extra: arq
19
+ Requires-Dist: arq<1.0.0,>=0.26.3; extra == 'arq'
20
+ Requires-Dist: tortoise-plastron>=0.1.1; extra == 'arq'
21
+ Provides-Extra: sanic
22
+ Requires-Dist: sanic-ext>=24.12.0; extra == 'sanic'
23
+ Requires-Dist: sanic>=25.3.0; extra == 'sanic'
24
+ Description-Content-Type: text/markdown
25
+
26
+ # BA Kit
27
+
28
+ ## bakit dependency installation for local development and testing
29
+
30
+ `uv sync --all-extras --dev`
@@ -0,0 +1,18 @@
1
+ bakit/__init__.py,sha256=4GZKi13lDTD25YBkGakhZyEQZWTER_OWQMNPoH_UM2c,22
2
+ bakit/settings.py,sha256=AusdP9EtOBDBuNvcK3gieKyKTD_f6lAz44-AQxvR73s,2612
3
+ bakit/shell.py,sha256=jW7cnGbJc60sOmA_jgnCWKaE9Pm6QOvoYtTsPQVuma4,889
4
+ bakit/sanic/__init__.py,sha256=wxdGII3_XK2gmES3h5Kt28CVgND-kWvfam-e3IK3IhU,64
5
+ bakit/sanic/app.py,sha256=M6vF1480Ya9Xon-9YbWJCluyrIAQoQga_SrDo2814NM,1737
6
+ bakit/sanic/decorators.py,sha256=R4qyynBiB6BGoFzoCOYxtqGyjQ-VWY83A7T62XMXNmo,630
7
+ bakit/sanic/listeners.py,sha256=U828FErgb4IzTPswrOtHiaBSmudkIhcbBESj4LN3CA8,1111
8
+ bakit/sanic/middlewares.py,sha256=yNuZ_8JzdzJsEEv8adA8DZwMgfARpKIGbXhNRFrcolw,2536
9
+ bakit/sanic/views.py,sha256=0V2rqVZ6zrCkodNvu0AH4VYUn1RHiLYJMTdiX3rLKWM,11934
10
+ bakit/sanic/static/favicon.ico,sha256=O053tj32xBSw6b3qd7LjQe0-PoJ0oxrHvPX7pbz6csE,15406
11
+ bakit/sanic/static/favicon.png,sha256=QBRACJH47kTn6J42ta2r-tY4eQBLer2vqDGxF4ZPJi4,223
12
+ bakit/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
13
+ bakit/utils/db.py,sha256=xknEFED-kZisn4sPZP7IlSk1-5-0LaGGtUXqqdoFfSw,1601
14
+ bakit/utils/metrics.py,sha256=osCYEllEHe7aPxJBNvdRO-O7UaRuIMyxWfm_W0FNPgQ,7306
15
+ bakit/utils/sql_helpers.py,sha256=JhgA7WPZQb-LFfjW_fXYRnc2yS0p-kxGzSNaX9cSULE,2046
16
+ balabs_kit-0.0.3.dist-info/METADATA,sha256=XCjrGlkS6qTsv4chWeyxG_hfrIAqzOlnNa5P_xcVUcg,930
17
+ balabs_kit-0.0.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
18
+ balabs_kit-0.0.3.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.28.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any