balabs-kit 0.0.3__py3-none-any.whl → 0.0.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bakit/__init__.py +7 -1
- bakit/arq/__init__.py +5 -0
- bakit/arq/decorators.py +92 -0
- bakit/arq/task_loader.py +44 -0
- bakit/arq/worker.py +70 -0
- bakit/cli.py +177 -0
- bakit/config.py +126 -0
- bakit/init.py +38 -0
- bakit/sanic/app.py +29 -4
- bakit/settings.py +4 -101
- bakit/shell.py +8 -5
- bakit/utils/db.py +0 -3
- bakit/utils/discord.py +27 -0
- bakit/utils/metrics.py +24 -17
- {balabs_kit-0.0.3.dist-info → balabs_kit-0.0.8.dist-info}/METADATA +5 -3
- balabs_kit-0.0.8.dist-info/RECORD +26 -0
- balabs_kit-0.0.3.dist-info/RECORD +0 -18
- {balabs_kit-0.0.3.dist-info → balabs_kit-0.0.8.dist-info}/WHEEL +0 -0
bakit/__init__.py
CHANGED
bakit/arq/__init__.py
ADDED
bakit/arq/decorators.py
ADDED
@@ -0,0 +1,92 @@
+import asyncio
+import logging
+from contextlib import contextmanager
+from functools import wraps
+
+import sentry_sdk
+
+from bakit import settings
+from bakit.utils.metrics import timer
+
+log = logging.getLogger(__name__)
+
+
+@contextmanager
+def _handle_exceptions():
+    """
+    Context manager to handle ARQ job timeouts and report them to Sentry.
+
+    When ARQ times out a job, it raises asyncio.CancelledError. This context
+    manager catches that exception, reports it to Sentry, and re-raises it to
+    maintain the expected behavior.
+
+    This is needed because ARQ's CancelledError also cancels Sentry's coroutine
+    that would send the error to Sentry.
+    """
+    try:
+        yield
+    except asyncio.CancelledError as e:
+        sentry_sdk.capture_exception(e)
+        raise
+    except Exception:
+        raise
+
+
+def task(name=None):
+    """
+    Decorator for async task functions.
+
+    This decorator wraps async functions to automatically:
+    - Track execution time using the timer context manager
+    - Handle exceptions (including asyncio.CancelledError) via _handle_exceptions
+    - Handle enabling/disabling the task via redis
+
+    The decorator is designed for use with background task systems like ARQ, where
+    timing and proper exception handling are crucial for monitoring and reliability.
+
+    Args:
+        name (str, optional): Custom name for the task. If not provided, uses the
+            function's __name__ attribute. This name is used for metrics labeling
+            as "task.{name}".
+
+    Returns:
+        callable: The decorated async function with timing and exception handling.
+
+    Usage:
+        # Without parameters (uses function name)
+        @task
+        async def my_background_task():
+            await some_async_operation()
+
+        # With custom name
+        @task(name="custom_task_name")
+        async def my_background_task():
+            await some_async_operation()
+    """
+
+    def decorator(func):
+        task_name = name or func.__name__
+
+        @wraps(func)
+        async def wrapper(ctx, *args, **kwargs):
+            redis = ctx.get("redis")
+            if redis:
+                task_disabled = await redis.sismember(
+                    settings.ARQ_CRON_DISABLED_KEY, task_name
+                )
+                if task_disabled:
+                    log.info(f"Task {task_name} is disabled. Skipping.")
+                    return
+
+            with timer(f"task.{task_name}"), _handle_exceptions():
+                return await func(ctx, *args, **kwargs)
+
+        return wrapper
+
+    # support both @task and @task(name="name")
+    if callable(name):
+        func = name
+        name = None
+        return decorator(func)
+
+    return decorator
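Usage note: the wrapped function receives the ARQ job context as its first argument, and a task can be switched off at runtime by adding its name to the Redis set named by settings.ARQ_CRON_DISABLED_KEY. A minimal sketch (the task body and the arq_redis handle are hypothetical, not part of this package):

    from bakit.arq.decorators import task

    @task(name="fetch_prices")
    async def fetch_prices(ctx):
        # ctx is the ARQ job context; the worker exposes its Redis pool as ctx["redis"]
        ...

    # Disable / re-enable the task at runtime (illustrative):
    # await arq_redis.sadd(settings.ARQ_CRON_DISABLED_KEY, "fetch_prices")
    # await arq_redis.srem(settings.ARQ_CRON_DISABLED_KEY, "fetch_prices")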
bakit/arq/task_loader.py
ADDED
@@ -0,0 +1,44 @@
+from importlib import import_module
+
+from arq import func
+
+
+def _load_task_modules(task_packages):
+    modules = []
+    for pkg in task_packages:
+        modules.append(import_module(f"{pkg}.tasks"))
+    return modules
+
+
+def _normalize_fn_to_dotted(fn):
+    if isinstance(fn, str):
+        return fn
+    return "{}.{}".format(fn.__module__, getattr(fn, "__name__", fn.__class__.__name__))
+
+
+def collect_cron_jobs_and_functions(task_packages):
+    cron_jobs = []
+    functions = []
+
+    for module in _load_task_modules(task_packages):
+        # also allow optional explicit FUNCTIONS = [callable, ...]
+        extra_funcs = getattr(module, "FUNCTIONS", [])
+        for fn in extra_funcs:
+            # replace name so if we have two functions with the same name in different
+            # modules, it still works
+            functions.append(func(_normalize_fn_to_dotted(fn)))
+
+        # Each tasks.py should define CRON_JOBS = [cron(...), ...]
+        jobs = getattr(module, "CRON_JOBS", [])
+
+        for job in jobs:
+            # Set ID of the job, used to enforce job uniqueness
+            if not job.job_id:
+                job.job_id = job.name
+
+            # replace name so if we have two functions with the same name in different
+            # modules, it still works
+            job.name = f"cron:{_normalize_fn_to_dotted(job.coroutine)}"
+            cron_jobs.append(job)
+
+    return cron_jobs, functions
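The loader imports `<pkg>.tasks` for every package name it is given, so each package must provide a tasks module defining CRON_JOBS and, optionally, FUNCTIONS. A sketch of a conforming module (myapp and its coroutines are hypothetical names):

    # myapp/tasks.py
    from arq import cron

    from myapp.jobs import nightly_sync, send_report  # hypothetical coroutines

    CRON_JOBS = [cron(nightly_sync, hour=2, minute=0)]
    FUNCTIONS = [send_report]  # optional: plain enqueueable functions

    # Worker setup would then call:
    # cron_jobs, functions = collect_cron_jobs_and_functions(["myapp"])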
bakit/arq/worker.py
ADDED
@@ -0,0 +1,70 @@
+# ruff: noqa: E402
+import asyncio
+import contextlib
+import logging
+
+import sentry_sdk
+from sentry_sdk.integrations.logging import LoggingIntegration
+from tortoise import Tortoise
+
+from bakit import settings
+
+sentry_sdk.init(
+    dsn=settings.SENTRY_DSN,
+    max_breadcrumbs=20,
+    integrations=[
+        LoggingIntegration(level=logging.INFO, event_level=logging.WARNING),
+    ],
+)
+# Import all bakit stuff after settings and after Sentry has been initialized to
+# capture any errors/warnings in Sentry if they happen during imports
+
+from bakit.utils import metrics
+
+
+async def _report_queue_size(ctx):
+    redis = ctx["redis"]
+    queue = redis.default_queue_name
+    sanitized_queue_name = queue.replace(".", "_").replace(":", "_")
+    metric_name = f"arq.queue.size.{sanitized_queue_name}"
+    while True:
+        n = await redis.zcard(queue)  # ARQ stores jobs in a ZSET per queue
+        metrics.gauge(metric_name, n)
+        await asyncio.sleep(10)
+
+
+async def on_startup(ctx):
+    await Tortoise.init(config=settings.TORTOISE_ORM)
+    ctx["metrics_task"] = asyncio.create_task(_report_queue_size(ctx))
+
+
+async def on_shutdown(ctx):
+    await Tortoise.close_connections()
+
+    metrics_task = ctx.get("metrics_task")
+    if metrics_task:
+        metrics_task.cancel()
+        with contextlib.suppress(asyncio.CancelledError):
+            await metrics_task
+
+
+def build_worker(config):
+    if not isinstance(config, dict):
+        raise TypeError("Config must be a dictionary")
+
+    cfg = {
+        "on_startup": on_startup,
+        "on_shutdown": on_shutdown,
+        # Don't keep result of the job as we use unique job_id on cron jobs to avoid
+        # scheduling the same job multiple times at the same time. If keep_result is
+        # set, and the job fails, the new job will not be able to be rescheduled until
+        # keep_result expires
+        "keep_result": 0,
+        "max_jobs": 6,
+        "queue_read_limit": 12,  # keep at 2x max_jobs
+        "job_timeout": 15 * 60,  # max time per job
+        "job_completion_wait": 8 * 60,  # wait 8 minutes for completion
+        "graceful_shutdown_timeout": 9 * 60,  # total shutdown time
+    }
+    cfg.update(config)
+    return type("ARQWorker", (), cfg)
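build_worker returns a class rather than an instance because ARQ discovers its configuration from a WorkerSettings class object; cfg.update(config) lets a project override any of the defaults above (e.g. max_jobs or job_timeout). A plausible wiring on top of the task loader (module and package names are hypothetical):

    # worker_settings.py
    from arq.connections import RedisSettings

    from bakit.arq.task_loader import collect_cron_jobs_and_functions
    from bakit.arq.worker import build_worker

    cron_jobs, functions = collect_cron_jobs_and_functions(["myapp"])

    WorkerSettings = build_worker(
        {
            "redis_settings": RedisSettings(host="localhost"),
            "cron_jobs": cron_jobs,
            "functions": functions,
        }
    )

    # run with: arq worker_settings.WorkerSettings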
bakit/cli.py
ADDED
@@ -0,0 +1,177 @@
+import importlib
+import pkgutil
+import time
+from datetime import UTC, datetime
+from pathlib import Path
+
+import asyncclick as click
+from discord import Color, Embed
+from tortoise import Tortoise
+
+from bakit import settings
+from bakit.utils.discord import send_webhook_embed
+
+__all__ = ["BakitCommand", "BakitGroup", "autodiscover_and_attach"]
+
+
+async def _send_discord_embed(embed):
+    embed.set_footer(text=f"{settings.APP_NAME} one-off tasks")
+    await send_webhook_embed(settings.WATCHTOWER_WEBHOOK_URL, embed)
+
+
+async def _run_one_off_with_notifications(ctx, invoke_fn):
+    cmd = "{} {}".format(
+        ctx.command_path,
+        " ".join(f"--{k}={v}" for k, v in ctx.params.items()) if ctx.params else "",
+    )
+    click.echo(cmd)
+
+    def _make_embed(text, color):
+        description = f"{text} - `{cmd}`"
+        return Embed(description=description, color=color, timestamp=datetime.now(UTC))
+
+    await _send_discord_embed(_make_embed("🟣 Task starting", Color.purple()))
+    started = time.monotonic()
+    try:
+        result = await invoke_fn(ctx)
+    except Exception as e:
+        await _send_discord_embed(_make_embed(f"🔴 Task failed: {e}", Color.red()))
+        raise
+    else:
+        elapsed = time.monotonic() - started
+        await _send_discord_embed(
+            _make_embed(f"🟢 Task done in {elapsed:.1f}s", Color.green())
+        )
+        return result
+
+
+class BakitCommand(click.Command):
+    async def invoke(self, ctx):
+        await Tortoise.init(config=settings.TORTOISE_ORM)
+
+        try:
+            if settings.IS_ONE_OFF_CMD:
+                return await _run_one_off_with_notifications(ctx, super().invoke)
+            else:
+                return await super().invoke(ctx)
+        finally:
+            await Tortoise.close_connections()
+
+
+class BakitGroup(click.Group):
+    command_class = BakitCommand
+
+
+def _wrap_tree(cmd):
+    # If it's already wrapped, return as-is
+    if isinstance(cmd, (BakitCommand, BakitGroup)):
+        return cmd
+
+    # Wrap groups by rebuilding them as BakitGroup
+    if isinstance(cmd, click.Group):
+        new_grp = BakitGroup(
+            name=cmd.name,
+            commands={},
+            callback=cmd.callback,
+            params=list(cmd.params),
+            help=cmd.help,
+            epilog=cmd.epilog,
+            short_help=cmd.short_help,
+            options_metavar=cmd.options_metavar,
+            add_help_option=cmd.add_help_option,
+            no_args_is_help=cmd.no_args_is_help,
+            hidden=getattr(cmd, "hidden", False),
+            deprecated=getattr(cmd, "deprecated", False),
+            invoke_without_command=getattr(cmd, "invoke_without_command", False),
+            context_settings=getattr(cmd, "context_settings", None),
+        )
+
+        for name, sub in cmd.commands.items():
+            new_grp.add_command(_wrap_tree(sub), name=name)
+
+        return new_grp
+
+    # Wrap leaf commands by rebuilding them as BakitCommand (preserves args/options)
+    if isinstance(cmd, click.Command):
+        return BakitCommand(
+            name=cmd.name,
+            callback=cmd.callback,
+            params=list(cmd.params),
+            help=cmd.help,
+            epilog=cmd.epilog,
+            short_help=cmd.short_help,
+            options_metavar=cmd.options_metavar,
+            add_help_option=cmd.add_help_option,
+            no_args_is_help=cmd.no_args_is_help,
+            hidden=getattr(cmd, "hidden", False),
+            deprecated=getattr(cmd, "deprecated", False),
+            context_settings=getattr(cmd, "context_settings", None),
+        )
+
+    return cmd
+
+
+def _make_wrapper_command(mod, cmd_name):
+    cmd = getattr(mod, "cmd", None)
+
+    if not cmd:
+        return
+
+    if not isinstance(cmd, click.core.BaseCommand):
+        raise TypeError(
+            f"Invalid `cmd` in module '{mod.__name__}': expected a Click command/group "
+            f"instance, got {type(cmd).__name__}.\n"
+            "Fix: define `cmd` using @click.command() or @click.group()."
+        )
+
+    # Ensure the command has a stable name when mounted under the scripts group
+    if not getattr(cmd, "name", None):
+        cmd.name = cmd_name
+
+    return _wrap_tree(cmd)
+
+
+def _load_scripts_group(pkg_name):
+    # Create the group for the package, named after it (e.g. `myproject`)
+    grp = click.Group(name=pkg_name)
+
+    scripts_pkg = f"{pkg_name}.scripts"
+    try:
+        scripts_mod = importlib.import_module(scripts_pkg)
+    except ModuleNotFoundError:
+        # package exists but no scripts package
+        return None
+
+    # Discover script modules under <package>/scripts/*.py
+    for m in pkgutil.iter_modules(scripts_mod.__path__):
+        if m.ispkg or m.name.startswith("_"):
+            continue
+
+        full_name = f"{scripts_pkg}.{m.name}"
+        mod = importlib.import_module(full_name)
+        cmd = _make_wrapper_command(mod, cmd_name=m.name)
+        if cmd:
+            grp.add_command(cmd, name=m.name)
+
+    return grp
+
+
+def _iter_immediate_packages(root_dir):
+    # Find immediate subfolders of the package containing cli.py
+    for p in root_dir.iterdir():
+        if not p.is_dir():
+            continue
+        if p.name.startswith(("_", ".")):
+            continue
+        if (p / "__init__.py").exists() and (p / "scripts/").exists():
+            yield p.name
+
+
+def autodiscover_and_attach(base_file, cli):
+    root_dir = Path(base_file).resolve().parent
+
+    for pkg_name in _iter_immediate_packages(root_dir):
+        grp = _load_scripts_group(pkg_name)
+        if grp:
+            cli.add_command(grp, name=pkg_name)
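Autodiscovery assumes a layout of <root>/<package>/scripts/<script>.py, where each script module exports a Click (asyncclick) command named cmd. A sketch of both halves (all names hypothetical):

    # myproject/scripts/backfill.py
    import asyncclick as click

    @click.command()
    @click.option("--days", default=7)
    async def cmd(days):
        ...  # Tortoise is already initialized by BakitCommand.invoke

    # cli.py at the project root
    import asyncclick as click

    from bakit.cli import BakitGroup, autodiscover_and_attach

    @click.group(cls=BakitGroup)
    async def cli():
        pass

    autodiscover_and_attach(__file__, cli)

    # invoked as: python cli.py myproject backfill --days=30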
bakit/config.py
ADDED
@@ -0,0 +1,126 @@
+import re
+from copy import deepcopy
+from types import SimpleNamespace
+
+_APP_NAME_RE = re.compile(r"^[A-Za-z]+$")
+
+
+class Settings:
+    """
+    Settings proxy so it doesn't matter where and how settings are imported;
+    they still point to the correct settings after they're initialized.
+    """
+
+    _wrapped = None
+
+    def init(self, real_settings):
+        self._wrapped = real_settings
+
+    def __getattr__(self, name):
+        if self._wrapped is None:
+            raise RuntimeError("Settings not initialized")
+        return getattr(self._wrapped, name)
+
+
+settings = Settings()
+
+
+def load_settings(env, configure_settings):
+    base = _default_settings(env)
+    base_copy = deepcopy(base)
+
+    if configure_settings is not None:
+        final_dict = configure_settings(base_copy, env)
+        if final_dict is None:
+            raise RuntimeError("configure_settings must return a dict of settings")
+    else:
+        final_dict = base_copy
+
+    settings.init(SimpleNamespace(**final_dict))
+    return settings
+
+
+def _default_settings(env):
+    app_name = env("APP_NAME")
+    if not _APP_NAME_RE.fullmatch(app_name):
+        raise RuntimeError(
+            "Invalid APP_NAME. Must contain only letters A-Z or a-z, "
+            "no spaces, numbers, or symbols."
+        )
+
+    default_log_level = env("DEFAULT_LOG_LEVEL", default="WARNING")
+    app_log_level = env("APP_LOG_LEVEL", default="INFO")
+    tortoise_log_level = env("TORTOISE_LOG_LEVEL", default="WARNING")
+    arq_log_level = env("ARQ_LOG_LEVEL", default="INFO")
+    generic_log_level = env("GENERIC_LOG_LEVEL", default="WARNING")
+
+    return {
+        "APP_NAME": app_name,
+        "CORS_ORIGINS": [
+            re.compile(r"^http://(localhost|127\.0\.0\.1):\d+$"),
+            re.compile(r"^https://(\S+\.)?vercel\.app$"),
+            re.compile(r"^https://(\S+\.)?blockanalitica\.com$"),
+        ],
+        "CORS_METHODS": ["GET", "POST", "PUT", "DELETE", "OPTIONS"],
+        "SENTRY_DSN": env("SENTRY_DSN", ""),
+        "STATSD_HOST": env("STATSD_HOST", ""),
+        "STATSD_PORT": env("STATSD_PORT", default=8125),
+        "STATSD_PREFIX": env("STATSD_PREFIX", default=app_name.lower()),
+        "REDIS_HOST": env("REDIS_HOST", ""),
+        "REDIS_PORT": env.int("REDIS_PORT", 6379),
+        "REDIS_DB": env.int("REDIS_DB", 2),
+        "CACHE_MIDDLEWARE_SECONDS": 5,
+        "CACHE_MIDDLEWARE_ENABLED": env.bool("CACHE_MIDDLEWARE_ENABLED", False),
+        "ARQ_CRON_DISABLED_KEY": f"{app_name.lower()}:arq:cron:disabled",
+        "DEFAULT_LOG_LEVEL": env("DEFAULT_LOG_LEVEL", default="WARNING"),
+        "APP_LOG_LEVEL": env("APP_LOG_LEVEL", default="INFO"),
+        "TORTOISE_LOG_LEVEL": env("TORTOISE_LOG_LEVEL", default="WARNING"),
+        "ARQ_LOG_LEVEL": env("ARQ_LOG_LEVEL", default="INFO"),
+        "GENERIC_LOG_LEVEL": env("GENERIC_LOG_LEVEL", default="WARNING"),
+        "LOGGING_CONFIG": {
+            "version": 1,
+            "disable_existing_loggers": False,
+            "formatters": {
+                "default": {
+                    "format": (
+                        "[%(asctime)s] %(name)s {%(module)s:%(lineno)d} "
+                        "PID=%(process)d [%(levelname)s] - %(message)s"
+                    ),
+                },
+            },
+            "handlers": {
+                "console": {
+                    "level": "DEBUG",
+                    "class": "logging.StreamHandler",
+                    "stream": "ext://sys.stdout",
+                    "formatter": "default",
+                },
+            },
+            "root": {
+                "level": default_log_level,
+                "handlers": ["console"],
+            },
+            "loggers": {
+                "bakit": {
+                    "propagate": True,
+                    "level": app_log_level,
+                },
+                "core": {
+                    "propagate": True,
+                    "level": app_log_level,
+                },
+                "tortoise": {
+                    "propagate": True,
+                    "level": tortoise_log_level,
+                },
+                "arq": {
+                    "propagate": True,
+                    "level": arq_log_level,
+                },
+                "asyncio": {
+                    "propagate": True,
+                    "level": generic_log_level,
+                },
+            },
+        },
+    }
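Projects customize the defaults through the configure_settings hook: it receives a deep copy of the default dict plus the environs.Env instance and must return the final dict. Keys that bakit reads elsewhere but does not default here (e.g. TORTOISE_ORM, WATCHTOWER_WEBHOOK_URL, IS_ONE_OFF_CMD) are presumably added by this hook. A sketch (APP_DB_URL and the model list are hypothetical):

    def configure_settings(base, env):
        base["TORTOISE_ORM"] = {
            "connections": {"default": env("APP_DB_URL")},
            "apps": {
                "core": {"models": ["myproject.models"], "default_connection": "default"},
            },
        }
        base["WATCHTOWER_WEBHOOK_URL"] = env("WATCHTOWER_WEBHOOK_URL", "")
        base["IS_ONE_OFF_CMD"] = env.bool("IS_ONE_OFF_CMD", False)
        return base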
bakit/init.py
ADDED
@@ -0,0 +1,38 @@
+import logging.config
+import os
+from decimal import getcontext
+from pathlib import Path
+
+from environs import Env
+
+from bakit.config import load_settings
+
+
+def init_bakit(configure_settings=None, env_overrides=None):
+    """
+    Initialize the bakit runtime for the current process.
+
+    This function is the single required entrypoint for all projects using bakit.
+    It must be called exactly once, and as early as possible, from each
+    executable entrypoint (e.g. server, worker, CLI).
+    """
+
+    env = Env()
+    # Need to pass in a path, otherwise it doesn't detect the correct path
+    env.read_env(Path(os.getcwd()) / ".env")
+
+    # Enable overriding env variables after they've been read from .env file
+    if env_overrides and isinstance(env_overrides, dict):
+        for key, value in env_overrides.items():
+            os.environ[key] = value
+
+    # Increase global Decimal precision to avoid InvalidOperation errors during
+    # quantize(). The default context precision (28 digits) is too low for our values
+    # which can exceed 28 significant digits (e.g., 14 integer + 18 fractional).
+    getcontext().prec = 60
+
+    # Load settings from env
+    settings = load_settings(env, configure_settings)
+
+    # Set up logging config
+    logging.config.dictConfig(settings.LOGGING_CONFIG)
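A typical entrypoint therefore calls init_bakit before importing anything that reads settings, since the proxy raises "Settings not initialized" until load_settings has run. Sketch (myproject.conf is a hypothetical module holding the hook shown above):

    from bakit.init import init_bakit
    from myproject.conf import configure_settings

    init_bakit(configure_settings=configure_settings)

    # only now are settings safe to read:
    from bakit import settings
    print(settings.APP_NAME)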
bakit/sanic/app.py
CHANGED
@@ -1,3 +1,5 @@
+import hashlib
+import hmac
 from pathlib import Path
 
 from orjson import dumps
@@ -9,13 +11,19 @@ from tortoise.contrib.sanic import register_tortoise
 from bakit import settings
 from bakit.sanic.listeners import setup_cache_listener, setup_sentry_listener
 from bakit.sanic.middlewares import cache_middleware_request, cache_middleware_response
-from bakit.settings import APP_NAME, LOGGING_CONFIG, TORTOISE_ORM
 from bakit.utils.metrics import view_metrics
 
 STATIC_DIR = Path(__file__).resolve().parent / "static"
 
+_SENTRY_DEBUG_DIGEST = (
+    "b2cded34bf480236d91e54b631185347f52321b1fdc8c40b89ca507d3a1458ee"
+)
+_SENTRY_DEBUG_KEY = b"sentry-debug-v1"
 
-
+
+def create_base_app(
+    app_name=settings.APP_NAME, log_config=settings.LOGGING_CONFIG, is_testing=False
+):
     app = Sanic(app_name, strict_slashes=True, log_config=log_config, dumps=dumps)
     app.config.FALLBACK_ERROR_FORMAT = "json"
 
@@ -36,17 +44,34 @@ def create_base_app(app_name=APP_NAME, log_config=LOGGING_CONFIG, is_testing=Fal
     app.register_middleware(cache_middleware_request, "request")
     app.register_middleware(cache_middleware_response, "response")
 
-    # /ping/ endpoint is needed for load balancer health checks. Do not remove
+    # /ping/ endpoint is needed for load balancer health checks. Do not remove.
     @app.route("/ping/", methods=["GET"])
     @view_metrics()
     async def health(request):
         return text("pong", status=200)
 
+    # /sentry-debug/ endpoint is used for testing sentry integration. Do not remove.
+    @app.route("/sentry-debug/", methods=["GET"])
+    @view_metrics()
+    async def sentry_debug(request):
+        secret = request.args.get("secret")
+        if not secret:
+            return text("not found", status=404)
+
+        digest = hmac.new(
+            _SENTRY_DEBUG_KEY, secret.encode(), hashlib.sha256
+        ).hexdigest()
+        if not hmac.compare_digest(digest, _SENTRY_DEBUG_DIGEST):
+            return text("not found", status=404)
+
+        a = 1 / 0
+        return text(str(a), status=500)
+
     # Setup Tortoise ORM
     if not is_testing:
         register_tortoise(
             app,
-            config=TORTOISE_ORM,
+            config=settings.TORTOISE_ORM,
             generate_schemas=False,
         )
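The new /sentry-debug/ route gates its deliberate ZeroDivisionError behind an HMAC check: the secret query parameter is MACed with the hard-coded key and compared against the hard-coded digest in constant time, so only callers who know the secret can trigger a test event. The check can be reproduced offline (the candidate value below is hypothetical; the real secret is not in the diff):

    import hashlib
    import hmac

    candidate = "example-secret"
    digest = hmac.new(b"sentry-debug-v1", candidate.encode(), hashlib.sha256).hexdigest()
    ok = hmac.compare_digest(
        digest, "b2cded34bf480236d91e54b631185347f52321b1fdc8c40b89ca507d3a1458ee"
    )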
bakit/settings.py
CHANGED
@@ -1,103 +1,6 @@
-import re
-import sys
+from bakit.config import settings
 
-from environs import Env
 
-
-
-
-
-CORS_ORIGINS = [
-    re.compile(r"^http://(localhost|127\.0\.0\.1):\d+$"),
-    re.compile(r"^https://(\S+\.)?vercel\.app$"),
-    re.compile(r"^https://(\S+\.)?blockanalitica\.com$"),
-]
-CORS_METHODS = ["GET", "POST", "PUT", "DELETE", "OPTIONS"]
-
-
-TORTOISE_ORM = {
-    "connections": {
-        "default": env("APP_DB_URL"),
-    },
-    "apps": {
-        "core": {
-            "models": [],
-            "default_connection": "default",
-        },
-    },
-}
-
-
-SENTRY_DSN = env("SENTRY_DSN", "")
-
-STATSD_HOST = env("STATSD_HOST", "")
-STATSD_PORT = env("STATSD_PORT", default=8125)
-STATSD_PREFIX = env("STATSD_PREFIX", default=APP_NAME)
-
-REDIS_HOST = env("REDIS_HOST", "")
-REDIS_PORT = env.int("REDIS_PORT", 6379)
-REDIS_DB = env.int("REDIS_DB", 2)
-
-CACHE_MIDDLEWARE_SECONDS = 5
-CACHE_MIDDLEWARE_ENABLED = env.bool("CACHE_MIDDLEWARE_ENABLED", False)
-
-APP_LOG_LEVEL = env("APP_LOG_LEVEL", default="INFO")
-TORTOISE_LOG_LEVEL = env("TORTOISE_LOG_LEVEL", default="WARNING")
-DEFAULT_LOG_LEVEL = env("DEFAULT_LOG_LEVEL", default="WARNING")
-ARQ_LOG_LEVEL = env("ARQ_LOG_LEVEL", default="INFO")
-CHAIN_HARVESTER_LOG_LEVEL = env("CHAIN_HARVESTER_LOG_LEVEL", default="WARNING")
-
-LOGGING_CONFIG = {
-    "version": 1,
-    "disable_existing_loggers": False,
-    "formatters": {
-        "default": {
-            "format": (
-                "[%(asctime)s] %(name)s {%(module)s:%(lineno)d} "
-                "PID=%(process)d [%(levelname)s] - %(message)s"
-            ),
-        },
-    },
-    "handlers": {
-        "console": {
-            "level": "DEBUG",
-            "class": "logging.StreamHandler",
-            "stream": sys.stdout,
-            "formatter": "default",
-        },
-    },
-    "loggers": {
-        "bakit": {
-            "propagate": True,
-            "level": APP_LOG_LEVEL,
-        },
-        "core": {
-            "propagate": True,
-            "level": APP_LOG_LEVEL,
-        },
-        "tortoise": {
-            "propagate": True,
-            "level": TORTOISE_LOG_LEVEL,
-        },
-        "tortoise.db_client": {
-            "propagate": True,
-            "level": TORTOISE_LOG_LEVEL,
-        },
-        "arq": {
-            "propagate": True,
-            "level": ARQ_LOG_LEVEL,
-        },
-        "arq.worker": {
-            "propagate": True,
-            "level": ARQ_LOG_LEVEL,
-        },
-        "chain_harvester": {
-            "propagate": True,
-            "level": CHAIN_HARVESTER_LOG_LEVEL,
-        },
-        "": {
-            "level": DEFAULT_LOG_LEVEL,
-            "handlers": ["console"],
-        },
-    },
-}
+def __getattr__(name):
+    # This makes `from bakit.settings import MY_SETTING` work
+    return getattr(settings, name)
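The rewritten module stays import-compatible because of PEP 562: a module-level __getattr__ is consulted for any name not found in the module, so both access styles below resolve through the lazy proxy in bakit.config at lookup time:

    from bakit import settings
    print(settings.APP_NAME)

    # falls back to bakit.settings.__getattr__("APP_NAME")
    from bakit.settings import APP_NAME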
bakit/shell.py
CHANGED
@@ -1,24 +1,26 @@
 # ruff: noqa: T100
-import asyncio
 import os
 
+from IPython.core.async_helpers import get_asyncio_loop
 from IPython.terminal.embed import InteractiveShellEmbed
 from tortoise import Tortoise
 
+from bakit import settings
 
-
+
+def start_ipython_shell(extra_ns=None, banner=None):
     """
     Async shell helper:
-    - initializes Tortoise with orm_config
     - starts IPython if available (with top-level await support)
     - falls back to stdlib interactive shell otherwise
     - always closes DB connections when done
     """
-    asyncio.run(Tortoise.init(config=orm_config))
 
     if banner is None:
         banner = "Tortoise shell. If IPython is installed, top-level await should work."
 
+    loop = get_asyncio_loop()
+
     ns = {
         "Tortoise": Tortoise,
         "os": os,
@@ -27,7 +29,8 @@ def start_ipython_shell(orm_config, extra_ns=None, banner=None):
     ns.update(extra_ns)
 
     try:
+        loop.run_until_complete(Tortoise.init(config=settings.TORTOISE_ORM))
         shell = InteractiveShellEmbed(banner2=banner)
         shell(local_ns=ns, global_ns=ns)
     finally:
-
+        loop.run_until_complete(Tortoise.close_connections())
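Since the shell now reads settings.TORTOISE_ORM instead of taking an orm_config argument, init_bakit must run first. A hypothetical management entrypoint:

    from bakit.init import init_bakit

    init_bakit()

    from bakit.shell import start_ipython_shell
    from myproject import models  # hypothetical

    start_ipython_shell(extra_ns={"models": models})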
bakit/utils/db.py
CHANGED
bakit/utils/discord.py
ADDED
@@ -0,0 +1,27 @@
+import datetime
+
+import aiohttp
+from discord import Webhook
+from discord.embeds import Embed
+
+
+async def send_webhook_embed(webhook_url, embed):
+    """
+    Send an embed to a Discord webhook (async).
+
+    Args:
+        webhook_url (str): The Discord webhook URL.
+        embed (discord.Embed): The embed to send.
+    """
+
+    async with aiohttp.ClientSession() as session:
+        webhook = Webhook.from_url(webhook_url, session=session)
+        await webhook.send(embed=embed)
+
+
+def make_embed(description, color):
+    return Embed(
+        description=description,
+        color=color,
+        timestamp=datetime.datetime.now(datetime.UTC),
+    )
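Illustrative call site (the webhook URL is a placeholder):

    from discord import Color

    from bakit.utils.discord import make_embed, send_webhook_embed

    await send_webhook_embed(
        "https://discord.com/api/webhooks/<id>/<token>",  # placeholder
        make_embed("Deploy finished", Color.green()),
    )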
bakit/utils/metrics.py
CHANGED
@@ -5,11 +5,18 @@ import statsd
 
 from bakit import settings
 
-
-
-
-
-
+_statsd_client = None
+
+
+def get_statsd_client():
+    global _statsd_client
+    if _statsd_client is None:
+        _statsd_client = statsd.StatsClient(
+            settings.STATSD_HOST,
+            settings.STATSD_PORT,
+            settings.STATSD_PREFIX,
+        )
+    return _statsd_client
 
 
 def multinetworktimerd(key):
@@ -90,7 +97,7 @@ def timer(key):
     >>> with metrics.timer('unique_key'):
     ...     time.sleep(1)
     """
-    statsd_timer =
+    statsd_timer = get_statsd_client().timer(str(key))
     statsd_timer.start()
     try:
         yield
@@ -111,7 +118,7 @@ def raw_timer(key, value):
     if not isinstance(value, int | float):
         return None
 
-    return
+    return get_statsd_client().timing(str(key), value)
 
 
 def increment(key, delta=1, subname=None):
@@ -128,7 +135,7 @@ def increment(key, delta=1, subname=None):
     if subname:
         name += f".{subname}"
 
-    return
+    return get_statsd_client().incr(name, delta)
 
 
 def decrement(key, delta=1, subname=None):
@@ -146,7 +153,7 @@ def decrement(key, delta=1, subname=None):
     if subname:
         name += f".{subname}"
 
-    return
+    return get_statsd_client().decr(name, delta)
 
 
 def gauge(key, value=1, subname=None):
@@ -164,8 +171,8 @@ def gauge(key, value=1, subname=None):
     # We never use the relative changes behaviour so attempt to always make it do the
     # set value behaviour instead.
     if value < 0:
-
-        return
+        get_statsd_client().gauge(name, 0)
+    return get_statsd_client().gauge(name, value)
 
 
 def function_long_name(func, extra=None):
@@ -180,11 +187,11 @@ def auto_named_statsd_timer(function_to_decorate):
 
     @wraps(function_to_decorate)
     def incr_and_call(*args, **kwargs):
-
+        get_statsd_client().incr(call_name)
         return function_to_decorate(*args, **kwargs)
 
     timer_name = function_long_name(function_to_decorate, "time")
-    named_decorator =
+    named_decorator = get_statsd_client().timer(timer_name)
 
     return named_decorator(incr_and_call)
@@ -218,9 +225,9 @@ def view_metrics_context(endpoint_name=None, instance=None):
     else:
         metric_base = "views.unknown.context"
 
-
+    get_statsd_client().incr(f"{metric_base}.hits")
 
-    with
+    with get_statsd_client().timer(f"{metric_base}.response_time"):
         yield
 
 
@@ -252,10 +259,10 @@ def view_metrics(endpoint_name=None):
         method_name = func.__name__
         metric_base = f"views.{cls_name}.{method_name}"
 
-
+        get_statsd_client().incr(f"{metric_base}.hits")
 
         # Time the function execution
-        with
+        with get_statsd_client().timer(f"{metric_base}.response_time"):
             return await func(*args, **kwargs)
 
         return wrapper
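The removed lines (blank or truncated in this view) sat where the StatsClient was previously configured at import time; the new get_statsd_client() is a lazy singleton, so the client is only constructed on first use, by which point init_bakit has initialized the settings proxy (a module-level client would raise "Settings not initialized" during import). Call sites are unchanged, e.g.:

    from bakit.utils import metrics

    metrics.increment("jobs.processed")
    metrics.gauge("queue.depth", 42)
    with metrics.timer("jobs.duration"):
        ...  # timed work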
{balabs_kit-0.0.3.dist-info → balabs_kit-0.0.8.dist-info}/METADATA CHANGED
@@ -1,17 +1,18 @@
 Metadata-Version: 2.4
 Name: balabs-kit
-Version: 0.0.3
+Version: 0.0.8
 Author-email: tsifrer <3967564+tsifrer@users.noreply.github.com>
 License-Expression: Apache-2.0
 Requires-Python: >=3.13
 Requires-Dist: aiocache[redis]>=0.12.3
+Requires-Dist: aiohttp>=3.13.3
 Requires-Dist: asyncclick>=8.3.0.7
 Requires-Dist: asyncpg>=0.31.0
+Requires-Dist: discord-py>=2.6.4
 Requires-Dist: environs>=14.5.0
 Requires-Dist: ipython>=9.8.0
 Requires-Dist: nest-asyncio>=1.6.0
-Requires-Dist:
-Requires-Dist: sentry-sdk>=2.47.0
+Requires-Dist: sentry-sdk>=2.48.0
 Requires-Dist: statsd>=4.0.1
 Requires-Dist: tortoise-orm>=0.25.1
 Requires-Dist: uvloop>=0.22.1
@@ -19,6 +20,7 @@ Provides-Extra: arq
 Requires-Dist: arq<1.0.0,>=0.26.3; extra == 'arq'
 Requires-Dist: tortoise-plastron>=0.1.1; extra == 'arq'
 Provides-Extra: sanic
+Requires-Dist: orjson>=3.11.5; extra == 'sanic'
 Requires-Dist: sanic-ext>=24.12.0; extra == 'sanic'
 Requires-Dist: sanic>=25.3.0; extra == 'sanic'
 Description-Content-Type: text/markdown
balabs_kit-0.0.8.dist-info/RECORD ADDED
@@ -0,0 +1,26 @@
+bakit/__init__.py,sha256=XMG_u43ldXqLHrJF_M9xkELRnSAsRgzLyOW4cwXhEew,114
+bakit/cli.py,sha256=5eoyVi1MdI5FcK2_0mms4dZpAPJiakqR87ZIIAUCILo,5532
+bakit/config.py,sha256=32ADsCi2qSwHJCRGU8jXzlRCwLpHZwixWcv_9Vmw6o0,4348
+bakit/init.py,sha256=uzkB22ZcZEC_eC8tGDfiJ_AfRlymVfqGhj7zvyGdoi8,1301
+bakit/settings.py,sha256=F4mA53W63VasDpAXUFDfdw9LA28U6t-4vkIMThy3taI,159
+bakit/shell.py,sha256=FWUU9FdyCeNh2drDV7AOz8nZf3Dazidt4yRDvL8tFZg,973
+bakit/arq/__init__.py,sha256=hd4_47_SC9DBII5jxkvpahPFDl0plsFkUo0kVVabwUo,190
+bakit/arq/decorators.py,sha256=kdxyCpOotEPhB-D0IDSDfWflulNBq_s_CyjH4z61oBo,2705
+bakit/arq/task_loader.py,sha256=vzzS3Fa0wzyRRsbb-3PiES61LBFleAHfOA_E9gjjUno,1417
+bakit/arq/worker.py,sha256=rxSKC4b0hxYLSuLS3rCBr_pqcSYPtd4o-jLu8-pnHDw,2217
+bakit/sanic/__init__.py,sha256=wxdGII3_XK2gmES3h5Kt28CVgND-kWvfam-e3IK3IhU,64
+bakit/sanic/app.py,sha256=wTe7eO-aI1027MC7GA7MlBUL7TXi1RwJY8TSoMP5Big,2466
+bakit/sanic/decorators.py,sha256=R4qyynBiB6BGoFzoCOYxtqGyjQ-VWY83A7T62XMXNmo,630
+bakit/sanic/listeners.py,sha256=U828FErgb4IzTPswrOtHiaBSmudkIhcbBESj4LN3CA8,1111
+bakit/sanic/middlewares.py,sha256=yNuZ_8JzdzJsEEv8adA8DZwMgfARpKIGbXhNRFrcolw,2536
+bakit/sanic/views.py,sha256=0V2rqVZ6zrCkodNvu0AH4VYUn1RHiLYJMTdiX3rLKWM,11934
+bakit/sanic/static/favicon.ico,sha256=O053tj32xBSw6b3qd7LjQe0-PoJ0oxrHvPX7pbz6csE,15406
+bakit/sanic/static/favicon.png,sha256=QBRACJH47kTn6J42ta2r-tY4eQBLer2vqDGxF4ZPJi4,223
+bakit/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bakit/utils/db.py,sha256=aKL20rB8aBxb26wUHmPDIOwcwE46_ypqckcWw_EEu08,1551
+bakit/utils/discord.py,sha256=o575uH8DY9o01lp7uxFgxjhs-CfvmQ3lIPE4Vru6Ikk,651
+bakit/utils/metrics.py,sha256=ggQkoTmi8fL_H_cIQrzvrDIp8lGCwGIaB_aMFJs0RXQ,7551
+bakit/utils/sql_helpers.py,sha256=JhgA7WPZQb-LFfjW_fXYRnc2yS0p-kxGzSNaX9cSULE,2046
+balabs_kit-0.0.8.dist-info/METADATA,sha256=-haPAUj2lMz2Qs4G3HcpF-J3AfHmc2pq0cRVXyY3uls,1012
+balabs_kit-0.0.8.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+balabs_kit-0.0.8.dist-info/RECORD,,
balabs_kit-0.0.3.dist-info/RECORD DELETED
@@ -1,18 +0,0 @@
-bakit/__init__.py,sha256=4GZKi13lDTD25YBkGakhZyEQZWTER_OWQMNPoH_UM2c,22
-bakit/settings.py,sha256=AusdP9EtOBDBuNvcK3gieKyKTD_f6lAz44-AQxvR73s,2612
-bakit/shell.py,sha256=jW7cnGbJc60sOmA_jgnCWKaE9Pm6QOvoYtTsPQVuma4,889
-bakit/sanic/__init__.py,sha256=wxdGII3_XK2gmES3h5Kt28CVgND-kWvfam-e3IK3IhU,64
-bakit/sanic/app.py,sha256=M6vF1480Ya9Xon-9YbWJCluyrIAQoQga_SrDo2814NM,1737
-bakit/sanic/decorators.py,sha256=R4qyynBiB6BGoFzoCOYxtqGyjQ-VWY83A7T62XMXNmo,630
-bakit/sanic/listeners.py,sha256=U828FErgb4IzTPswrOtHiaBSmudkIhcbBESj4LN3CA8,1111
-bakit/sanic/middlewares.py,sha256=yNuZ_8JzdzJsEEv8adA8DZwMgfARpKIGbXhNRFrcolw,2536
-bakit/sanic/views.py,sha256=0V2rqVZ6zrCkodNvu0AH4VYUn1RHiLYJMTdiX3rLKWM,11934
-bakit/sanic/static/favicon.ico,sha256=O053tj32xBSw6b3qd7LjQe0-PoJ0oxrHvPX7pbz6csE,15406
-bakit/sanic/static/favicon.png,sha256=QBRACJH47kTn6J42ta2r-tY4eQBLer2vqDGxF4ZPJi4,223
-bakit/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bakit/utils/db.py,sha256=xknEFED-kZisn4sPZP7IlSk1-5-0LaGGtUXqqdoFfSw,1601
-bakit/utils/metrics.py,sha256=osCYEllEHe7aPxJBNvdRO-O7UaRuIMyxWfm_W0FNPgQ,7306
-bakit/utils/sql_helpers.py,sha256=JhgA7WPZQb-LFfjW_fXYRnc2yS0p-kxGzSNaX9cSULE,2046
-balabs_kit-0.0.3.dist-info/METADATA,sha256=XCjrGlkS6qTsv4chWeyxG_hfrIAqzOlnNa5P_xcVUcg,930
-balabs_kit-0.0.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-balabs_kit-0.0.3.dist-info/RECORD,,
{balabs_kit-0.0.3.dist-info → balabs_kit-0.0.8.dist-info}/WHEEL
File without changes