db-retry 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,219 @@
1
+ Metadata-Version: 2.4
2
+ Name: db-retry
3
+ Version: 0.3.0
4
+ Summary: PostgreSQL and SQLAlchemy Tools
5
+ Keywords: python,postgresql,sqlalchemy
6
+ Author: community-of-python
7
+ License-Expression: MIT
8
+ Classifier: Programming Language :: Python :: 3.13
9
+ Classifier: Programming Language :: Python :: 3.14
10
+ Classifier: Typing :: Typed
11
+ Classifier: Topic :: Software Development :: Libraries
12
+ Requires-Dist: tenacity
13
+ Requires-Dist: sqlalchemy[asyncio]
14
+ Requires-Dist: asyncpg
15
+ Requires-Python: >=3.13, <4
16
+ Project-URL: repository, https://github.com/modern-python/sa-utils
17
+ Description-Content-Type: text/markdown
18
+
19
+ # db-retry
20
+
21
+ A Python library providing robust retry mechanisms, connection utilities, and transaction helpers for PostgreSQL and SQLAlchemy applications.
22
+
23
+ ## Features
24
+
25
+ - **Retry Decorators**: Automatic retry logic for transient database errors
26
+ - **Connection Factories**: Robust connection handling with multi-host support
27
+ - **DSN Utilities**: Flexible Data Source Name parsing and manipulation
28
+ - **Transaction Helpers**: Simplified transaction management with automatic cleanup
29
+
30
+ ## Installation
31
+
32
+ ### Using uv
33
+
34
+ ```bash
35
+ uv add db-retry
36
+ ```
37
+
38
+ ### Using pip
39
+
40
+ ```bash
41
+ pip install db-retry
42
+ ```
43
+
44
+ ## ORM-Based Usage Examples
45
+
46
+ ### 1. Database Operations with Automatic Retry
47
+
48
+ Protect your database operations from transient failures using ORM models:
49
+
50
+ ```python
51
+ import asyncio
52
+ import sqlalchemy as sa
53
+ from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
54
+ from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
55
+ from db_retry import postgres_retry
56
+
57
+
58
+ class Base(DeclarativeBase): ...
+
+
+ class User(Base):
59
+ __tablename__ = "users"
60
+
61
+ id: Mapped[int] = mapped_column(primary_key=True)
62
+ name: Mapped[str] = mapped_column(sa.String())
63
+ email: Mapped[str] = mapped_column(sa.String(), index=True)
64
+
65
+
66
+ # Apply retry logic to ORM operations
67
+ @postgres_retry
68
+ async def get_user_by_email(session: AsyncSession, email: str) -> User:
69
+ return await session.scalar(
70
+ sa.select(User).where(User.email == email)
71
+ )
72
+
73
+
74
+ async def main():
75
+ engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/mydb")
76
+ async with AsyncSession(engine) as session:
77
+ # Automatically retries on connection failures or serialization errors
78
+ user = await get_user_by_email(session, "john.doe@example.com")
79
+ if user:
80
+ print(f"Found user: {user.name}")
81
+
82
+
83
+ asyncio.run(main())
84
+ ```
85
+
86
+ ### 2. High Availability Database Connections
87
+
88
+ Set up resilient database connections with multiple fallback hosts:
89
+
90
+ ```python
91
+ import sqlalchemy as sa
92
+ from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
93
+ from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
94
+ from db_retry import build_connection_factory, build_db_dsn
95
+
96
+ # Configure multiple database hosts for high availability
97
+ multi_host_dsn = (
98
+ "postgresql://user:password@/"
99
+ "myapp_db?"
100
+ "host=primary-db:5432&"
101
+ "host=secondary-db:5432&"
102
+ "host=backup-db:5432"
103
+ )
104
+
105
+ # Build production-ready DSN
106
+ dsn = build_db_dsn(
107
+ db_dsn=multi_host_dsn,
108
+ database_name="production_database",
109
+ drivername="postgresql+asyncpg"
110
+ )
111
+
112
+ # Create connection factory with timeout
113
+ connection_factory = build_connection_factory(
114
+ url=dsn,
115
+ timeout=5.0 # 5 second connection timeout
116
+ )
117
+
118
+ # Engine will automatically try different hosts on failure
119
+ engine = create_async_engine(dsn, async_creator=connection_factory)
120
+ ```
121
+
122
+ ### 3. Simplified Transaction Management
123
+
124
+ Handle database transactions with automatic cleanup using ORM:
125
+
126
+ ```python
127
+ import dataclasses
128
+ import datetime
129
+ import typing
130
+
131
+ from schemas import AnalyticsEventCreate, AnalyticsEvent
132
+ from db_retry import Transaction, postgres_retry
133
+
134
+ from your_service_name.database.tables import EventsTable
135
+ from your_service_name.producers.analytics_service_events_producer import AnalyticsEventsProducer
136
+ from your_service_name.repositories.events_repository import EventsRepository
137
+ from your_service_name.settings import settings
138
+
139
+
140
+ @dataclasses.dataclass(kw_only=True, frozen=True, slots=True)
141
+ class CreateEventUseCase:
142
+ events_repository: EventsRepository
143
+ transaction: Transaction
144
+ analytics_events_producer: AnalyticsEventsProducer
145
+
146
+ @postgres_retry
147
+ async def __call__(
148
+ self,
149
+ event_create_data: AnalyticsEventCreate,
150
+ ) -> AnalyticsEvent:
151
+ async with self.transaction:
152
+ model: typing.Final = EventsTable(
153
+ **event_create_data.model_dump(),
154
+ created_at=datetime.datetime.now(tz=settings.common.default_timezone),
155
+ )
156
+ saved_event: typing.Final[EventsTable] = await self.events_repository.create(model)
157
+ event: typing.Final = AnalyticsEvent.model_validate(saved_event)
158
+ await self.analytics_events_producer.send_message(event)
159
+ await self.transaction.commit()
160
+ return event
161
+
162
+ ```
163
+
164
+ ### 4. Serializable Transactions for Consistency
165
+
166
+ Use serializable isolation level to prevent race conditions with ORM:
167
+
168
+ ```python
169
+ from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
170
+ from db_retry import Transaction
171
+
172
+
173
+ async def main():
174
+ engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/mydb")
175
+
176
+ async with AsyncSession(engine) as session:
177
+ strict_transaction = Transaction(
178
+ session=session,
179
+ isolation_level="SERIALIZABLE",
180
+ )
181
+ # use strict_transaction where needed
182
+ ```
183
+
184
+ ## Configuration
185
+
186
+ The library can be configured using environment variables:
187
+
188
+ | Variable | Description | Default |
189
+ |-------------------------|--------------------------------------------------|---------|
190
+ | `DB_RETRY_RETRIES_NUMBER` | Number of retry attempts for database operations | 3 |
191
+
192
+ Example:
193
+ ```bash
194
+ export DB_RETRY_RETRIES_NUMBER=5
195
+ ```
196
+
197
+ ## API Reference
198
+
199
+ ### Retry Decorator
200
+ - `@postgres_retry` - Decorator for async functions that should retry on database errors
201
+
202
+ ### Connection Utilities
203
+ - `build_connection_factory(url, timeout)` - Creates a connection factory for multi-host setups
204
+ - `build_db_dsn(db_dsn, database_name, use_replica=False, drivername="postgresql")` - Builds a DSN with specified parameters
205
+ - `is_dsn_multihost(db_dsn)` - Checks if a DSN contains multiple hosts
206
+
207
+ ### Transaction Helper
208
+ - `Transaction(session, isolation_level=None)` - Context manager for simplified transaction handling
209
+
210
+ ## Requirements
211
+
212
+ - Python 3.13+
213
+ - SQLAlchemy with asyncio support
214
+ - asyncpg PostgreSQL driver
215
+ - tenacity for retry logic
216
+
217
+ ## License
218
+
219
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
@@ -0,0 +1,201 @@
1
+ # db-retry
2
+
3
+ A Python library providing robust retry mechanisms, connection utilities, and transaction helpers for PostgreSQL and SQLAlchemy applications.
4
+
5
+ ## Features
6
+
7
+ - **Retry Decorators**: Automatic retry logic for transient database errors
8
+ - **Connection Factories**: Robust connection handling with multi-host support
9
+ - **DSN Utilities**: Flexible Data Source Name parsing and manipulation
10
+ - **Transaction Helpers**: Simplified transaction management with automatic cleanup
11
+
12
+ ## Installation
13
+
14
+ ### Using uv
15
+
16
+ ```bash
17
+ uv add db-retry
18
+ ```
19
+
20
+ ### Using pip
21
+
22
+ ```bash
23
+ pip install db-retry
24
+ ```
25
+
26
+ ## ORM-Based Usage Examples
27
+
28
+ ### 1. Database Operations with Automatic Retry
29
+
30
+ Protect your database operations from transient failures using ORM models:
31
+
32
+ ```python
33
+ import asyncio
34
+ import sqlalchemy as sa
35
+ from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
36
+ from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
37
+ from db_retry import postgres_retry
38
+
39
+
40
+ class Base(DeclarativeBase): ...
+
+
+ class User(Base):
41
+ __tablename__ = "users"
42
+
43
+ id: Mapped[int] = mapped_column(primary_key=True)
44
+ name: Mapped[str] = mapped_column(sa.String())
45
+ email: Mapped[str] = mapped_column(sa.String(), index=True)
46
+
47
+
48
+ # Apply retry logic to ORM operations
49
+ @postgres_retry
50
+ async def get_user_by_email(session: AsyncSession, email: str) -> User:
51
+ return await session.scalar(
52
+ sa.select(User).where(User.email == email)
53
+ )
54
+
55
+
56
+ async def main():
57
+ engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/mydb")
58
+ async with AsyncSession(engine) as session:
59
+ # Automatically retries on connection failures or serialization errors
60
+ user = await get_user_by_email(session, "john.doe@example.com")
61
+ if user:
62
+ print(f"Found user: {user.name}")
63
+
64
+
65
+ asyncio.run(main())
66
+ ```
67
+
68
+ ### 2. High Availability Database Connections
69
+
70
+ Set up resilient database connections with multiple fallback hosts:
71
+
72
+ ```python
73
+ import sqlalchemy as sa
74
+ from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
75
+ from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
76
+ from db_retry import build_connection_factory, build_db_dsn
77
+
78
+ # Configure multiple database hosts for high availability
79
+ multi_host_dsn = (
80
+ "postgresql://user:password@/"
81
+ "myapp_db?"
82
+ "host=primary-db:5432&"
83
+ "host=secondary-db:5432&"
84
+ "host=backup-db:5432"
85
+ )
86
+
87
+ # Build production-ready DSN
88
+ dsn = build_db_dsn(
89
+ db_dsn=multi_host_dsn,
90
+ database_name="production_database",
91
+ drivername="postgresql+asyncpg"
92
+ )
93
+
94
+ # Create connection factory with timeout
95
+ connection_factory = build_connection_factory(
96
+ url=dsn,
97
+ timeout=5.0 # 5 second connection timeout
98
+ )
99
+
100
+ # Engine will automatically try different hosts on failure
101
+ engine = create_async_engine(dsn, async_creator=connection_factory)
102
+ ```
103
+
104
+ ### 3. Simplified Transaction Management
105
+
106
+ Handle database transactions with automatic cleanup using ORM:
107
+
108
+ ```python
109
+ import dataclasses
110
+ import datetime
111
+ import typing
112
+
113
+ from schemas import AnalyticsEventCreate, AnalyticsEvent
114
+ from db_retry import Transaction, postgres_retry
115
+
116
+ from your_service_name.database.tables import EventsTable
117
+ from your_service_name.producers.analytics_service_events_producer import AnalyticsEventsProducer
118
+ from your_service_name.repositories.events_repository import EventsRepository
119
+ from your_service_name.settings import settings
120
+
121
+
122
+ @dataclasses.dataclass(kw_only=True, frozen=True, slots=True)
123
+ class CreateEventUseCase:
124
+ events_repository: EventsRepository
125
+ transaction: Transaction
126
+ analytics_events_producer: AnalyticsEventsProducer
127
+
128
+ @postgres_retry
129
+ async def __call__(
130
+ self,
131
+ event_create_data: AnalyticsEventCreate,
132
+ ) -> AnalyticsEvent:
133
+ async with self.transaction:
134
+ model: typing.Final = EventsTable(
135
+ **event_create_data.model_dump(),
136
+ created_at=datetime.datetime.now(tz=settings.common.default_timezone),
137
+ )
138
+ saved_event: typing.Final[EventsTable] = await self.events_repository.create(model)
139
+ event: typing.Final = AnalyticsEvent.model_validate(saved_event)
140
+ await self.analytics_events_producer.send_message(event)
141
+ await self.transaction.commit()
142
+ return event
143
+
144
+ ```
145
+
146
+ ### 4. Serializable Transactions for Consistency
147
+
148
+ Use serializable isolation level to prevent race conditions with ORM:
149
+
150
+ ```python
151
+ from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
152
+ from db_retry import Transaction
153
+
154
+
155
+ async def main():
156
+ engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/mydb")
157
+
158
+ async with AsyncSession(engine) as session:
159
+ strict_transaction = Transaction(
160
+ session=session,
161
+ isolation_level="SERIALIZABLE",
162
+ )
163
+ # use strict_transaction where needed
164
+ ```
165
+
166
+ ## Configuration
167
+
168
+ The library can be configured using environment variables:
169
+
170
+ | Variable | Description | Default |
171
+ |-------------------------|--------------------------------------------------|---------|
172
+ | `DB_RETRY_RETRIES_NUMBER` | Number of retry attempts for database operations | 3 |
173
+
174
+ Example:
175
+ ```bash
176
+ export DB_RETRY_RETRIES_NUMBER=5
177
+ ```
178
+
179
+ ## API Reference
180
+
181
+ ### Retry Decorator
182
+ - `@postgres_retry` - Decorator for async functions that should retry on database errors
183
+
184
+ ### Connection Utilities
185
+ - `build_connection_factory(url, timeout)` - Creates a connection factory for multi-host setups
186
+ - `build_db_dsn(db_dsn, database_name, use_replica=False, drivername="postgresql")` - Builds a DSN with specified parameters
187
+ - `is_dsn_multihost(db_dsn)` - Checks if a DSN contains multiple hosts
188
+
189
+ ### Transaction Helper
190
+ - `Transaction(session, isolation_level=None)` - Context manager for simplified transaction handling
191
+
192
+ ## Requirements
193
+
194
+ - Python 3.13+
195
+ - SQLAlchemy with asyncio support
196
+ - asyncpg PostgreSQL driver
197
+ - tenacity for retry logic
198
+
199
+ ## License
200
+
201
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
@@ -0,0 +1,13 @@
1
"""Public API of db-retry: retry decorator, connection factory, DSN helpers and Transaction."""

from db_retry.connections import build_connection_factory
from db_retry.dsn import build_db_dsn, is_dsn_multihost
from db_retry.retry import postgres_retry
from db_retry.transaction import Transaction


__all__ = [
    "Transaction",
    "build_connection_factory",
    "build_db_dsn",
    "is_dsn_multihost",
    "postgres_retry",
]
@@ -0,0 +1,79 @@
1
+ import logging
2
+ import random
3
+ import typing
4
+ from operator import itemgetter
5
+
6
+ import asyncpg
7
+ import sqlalchemy
8
+ from asyncpg.connect_utils import SessionAttribute
9
+ from sqlalchemy.dialects.postgresql.asyncpg import PGDialect_asyncpg
10
+
11
+
12
+ if typing.TYPE_CHECKING:
13
+ ConnectionType = asyncpg.Connection[typing.Any]
14
+
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
def build_connection_factory(
    url: sqlalchemy.URL,
    timeout: float,
) -> typing.Callable[[], typing.Awaitable["ConnectionType"]]:
    """Build a zero-argument async factory that opens asyncpg connections for *url*.

    The factory first tries a single ``asyncpg.connect`` call with all hosts
    (asyncpg handles multi-host fallback itself). If that attempt times out and
    the URL contained several host/port pairs, it retries each pair individually
    in random order before giving up.

    Suitable for ``create_async_engine(..., async_creator=factory)``.

    :param url: SQLAlchemy URL (may carry multiple ``host=``/``port=`` entries).
    :param timeout: per-connect timeout in seconds, passed to ``asyncpg.connect``.
    :return: coroutine function producing an ``asyncpg.Connection``.
    """
    # Let SQLAlchemy's asyncpg dialect translate the URL into asyncpg connect kwargs.
    connect_args: typing.Final[dict[str, typing.Any]] = PGDialect_asyncpg().create_connect_args(url)[1]  # type: ignore[no-untyped-call]
    # target_session_attrs is handled separately so it can be passed as an enum.
    raw_target_session_attrs: typing.Final[str | None] = connect_args.pop("target_session_attrs", None)
    target_session_attrs: typing.Final[SessionAttribute | None] = (
        SessionAttribute(raw_target_session_attrs) if raw_target_session_attrs else None
    )

    raw_hosts: typing.Final[str | list[str]] = connect_args.pop("host")
    raw_ports: typing.Final[int | list[int] | None] = connect_args.pop("port", None)
    hosts_and_ports: list[tuple[str, int]]
    hosts: str | list[str]
    ports: int | list[int] | None
    if isinstance(raw_hosts, list) and isinstance(raw_ports, list):
        # Multi-host DSN: shuffle host/port pairs once so different processes
        # spread their initial connection attempts across hosts.
        hosts_and_ports = list(zip(raw_hosts, raw_ports, strict=True))
        random.shuffle(hosts_and_ports)
        hosts = list(map(itemgetter(0), hosts_and_ports))
        ports = list(map(itemgetter(1), hosts_and_ports))
    else:
        # Single host (or host list without matching port list): no per-host
        # fallback is possible, signalled by an empty hosts_and_ports.
        hosts_and_ports = []
        hosts = raw_hosts
        ports = raw_ports

    async def _connection_factory() -> "ConnectionType":
        connection: ConnectionType
        nonlocal hosts_and_ports
        try:
            # Fast path: hand all hosts to asyncpg in one call.
            connection = await asyncpg.connect(
                **connect_args,
                host=hosts,
                port=ports,
                timeout=timeout,
                target_session_attrs=target_session_attrs,
            )
            return connection  # noqa: TRY300
        except TimeoutError:
            # No individual pairs to fall back to -> propagate the timeout.
            if not hosts_and_ports:
                raise

            logger.warning("Failed to fetch asyncpg connection. Trying host by host.")

            # Re-shuffle a copy so each fallback round probes hosts in a new order.
            hosts_and_ports_copy: typing.Final = hosts_and_ports.copy()
            random.shuffle(hosts_and_ports_copy)
            for one_host, one_port in hosts_and_ports_copy:
                try:
                    connection = await asyncpg.connect(
                        **connect_args,
                        host=one_host,
                        port=one_port,
                        timeout=timeout,
                        target_session_attrs=target_session_attrs,
                    )
                    return connection  # noqa: TRY300
                except (TimeoutError, OSError, asyncpg.TargetServerAttributeNotMatched) as exc:
                    # Log and keep probing the remaining hosts.
                    logger.warning("Failed to fetch asyncpg connection from %s, %s", one_host, exc)
            # NOTE(review): all failures (including plain timeouts) are reported as
            # TargetServerAttributeNotMatched here — confirm this is intentional.
            msg: typing.Final = f"None of the hosts match the target attribute requirement {target_session_attrs}"
            raise asyncpg.TargetServerAttributeNotMatched(msg)

    return _connection_factory
@@ -0,0 +1,34 @@
1
+ import typing
2
+
3
+ import sqlalchemy as sa
4
+
5
+
6
def build_db_dsn(
    db_dsn: str,
    database_name: str,
    use_replica: bool = False,
    drivername: str = "postgresql",
) -> sa.URL:
    """Parse a DSN string and substitute database, driver and session attrs.

    - DSN stored in format postgresql://login:password@/db_placeholder?host=host1&host=host2
      https://docs.sqlalchemy.org/en/20/dialects/postgresql.html#specifying-multiple-fallback-hosts
    - 'db_placeholder' is replaced here with service database name
    - `target_session_attrs` is chosen based on `use_replica` arg
    """
    parsed_url: typing.Final = sa.make_url(db_dsn)
    session_attrs: typing.Final = "prefer-standby" if use_replica else "read-write"
    # Merge the original query string, forcing target_session_attrs to our choice.
    merged_query: dict[str, typing.Any] = {
        **dict(parsed_url.query or {}),
        "target_session_attrs": session_attrs,
    }
    return parsed_url.set(
        database=database_name,
        drivername=drivername,
        query=merged_query,
    )
29
+
30
+
31
def is_dsn_multihost(db_dsn: str) -> bool:
    """Return True when the DSN's query string lists more than one host."""
    host_entries = dict(sa.make_url(db_dsn).query or {}).get("host")
    # Multiple "host=" params are parsed by SQLAlchemy into a tuple.
    return isinstance(host_entries, tuple) and len(host_entries) > 1
File without changes
@@ -0,0 +1,42 @@
1
+ import functools
2
+ import logging
3
+ import typing
4
+
5
+ import asyncpg
6
+ import tenacity
7
+ from sqlalchemy.exc import DBAPIError
8
+
9
+ from db_retry import settings
10
+
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
def _retry_handler(exception: BaseException) -> bool:
    """Return True when *exception* wraps a transient postgres error worth retrying.

    Retryable causes are asyncpg's SerializationError (serializable-isolation
    conflicts) and PostgresConnectionError (lost/failed connections), which
    SQLAlchemy exposes as the ``__cause__`` of ``DBAPIError.orig``.
    """
    if (
        isinstance(exception, DBAPIError)
        # DBAPIError.orig may be None for some wrapped errors; the previous
        # hasattr() check did not guard against that, and None.__cause__
        # raises AttributeError.
        and exception.orig is not None
        and isinstance(exception.orig.__cause__, (asyncpg.SerializationError, asyncpg.PostgresConnectionError))
    ):
        logger.debug("postgres_retry, retrying")
        return True

    logger.debug("postgres_retry, giving up on retry")
    return False
26
+
27
+
28
+ def postgres_retry[**P, T](
29
+ func: typing.Callable[P, typing.Coroutine[None, None, T]],
30
+ ) -> typing.Callable[P, typing.Coroutine[None, None, T]]:
31
+ @tenacity.retry(
32
+ stop=tenacity.stop_after_attempt(settings.DB_RETRY_RETRIES_NUMBER),
33
+ wait=tenacity.wait_exponential_jitter(),
34
+ retry=tenacity.retry_if_exception(_retry_handler),
35
+ reraise=True,
36
+ before=tenacity.before_log(logger, logging.DEBUG),
37
+ )
38
+ @functools.wraps(func)
39
+ async def wrapped_method(*args: P.args, **kwargs: P.kwargs) -> T:
40
+ return await func(*args, **kwargs)
41
+
42
+ return wrapped_method
@@ -0,0 +1,5 @@
1
"""Runtime configuration for db-retry, read from environment variables."""

import os
import typing


# Number of retry attempts for database operations; overridable via the
# DB_RETRY_RETRIES_NUMBER environment variable (defaults to 3).
DB_RETRY_RETRIES_NUMBER: typing.Final = int(os.getenv("DB_RETRY_RETRIES_NUMBER", "3"))
@@ -0,0 +1,28 @@
1
+ import dataclasses
2
+
3
+ import typing_extensions
4
+ from sqlalchemy.engine.interfaces import IsolationLevel
5
+ from sqlalchemy.ext import asyncio as sa_async
6
+
7
+
8
@dataclasses.dataclass(kw_only=True, frozen=True, slots=True)
class Transaction:
    """Async context manager providing simplified transaction handling.

    Entering the context optionally pins an isolation level on the session's
    connection and begins a transaction if one is not already active.
    Exiting always closes the session; call :meth:`commit` explicitly inside
    the block to persist changes.
    """

    # Session the transaction operates on.
    session: sa_async.AsyncSession
    # Optional isolation level (e.g. "SERIALIZABLE") applied when entering.
    isolation_level: IsolationLevel | None = None

    async def __aenter__(self) -> typing_extensions.Self:
        """Begin a transaction, applying the configured isolation level first."""
        if self.isolation_level:
            # Set execution options on the connection before any work happens,
            # so the isolation level takes effect for the whole transaction.
            await self.session.connection(execution_options={"isolation_level": self.isolation_level})

        if not self.session.in_transaction():
            await self.session.begin()
        return self

    async def __aexit__(self, *args: object, **kwargs: object) -> None:
        """Close the session unconditionally on context exit.

        NOTE(review): presumably uncommitted work is rolled back by SQLAlchemy's
        session-close semantics — confirm against the AsyncSession docs.
        """
        await self.session.close()

    async def commit(self) -> None:
        """Commit the session's current transaction."""
        await self.session.commit()

    async def rollback(self) -> None:
        """Roll back the session's current transaction."""
        await self.session.rollback()
@@ -0,0 +1,92 @@
1
+ [project]
2
+ name = "db-retry"
3
+ description = "PostgreSQL and SQLAlchemy Tools"
4
+ authors = [
5
+ { name = "community-of-python" },
6
+ ]
7
+ readme = "README.md"
8
+ requires-python = ">=3.13,<4"
9
+ license = "MIT"
10
+ keywords = [
11
+ "python",
12
+ "postgresql",
13
+ "sqlalchemy",
14
+ ]
15
+ classifiers = [
16
+ "Programming Language :: Python :: 3.13",
17
+ "Programming Language :: Python :: 3.14",
18
+ "Typing :: Typed",
19
+ "Topic :: Software Development :: Libraries",
20
+ ]
21
+ version = "0.3.0"
22
+ dependencies = [
23
+ "tenacity",
24
+ "sqlalchemy[asyncio]",
25
+ "asyncpg",
26
+ ]
27
+
28
+ [project.urls]
29
+ repository = "https://github.com/modern-python/sa-utils"
30
+
31
+ [dependency-groups]
32
+ dev = [
33
+ "pytest",
34
+ "pytest-cov",
35
+ "pytest-asyncio",
36
+ ]
37
+ lint = [
38
+ "ruff",
39
+ "mypy",
40
+ "eof-fixer",
41
+ "asyncpg-stubs",
42
+ ]
43
+
44
+ [build-system]
45
+ requires = ["uv_build"]
46
+ build-backend = "uv_build"
47
+
48
+ [tool.uv.build-backend]
49
+ module-name = "db_retry"
50
+ module-root = ""
51
+
52
+ [tool.mypy]
53
+ python_version = "3.13"
54
+ strict = true
55
+
56
+ [tool.ruff]
57
+ fix = false
58
+ unsafe-fixes = true
59
+ line-length = 120
60
+ target-version = "py313"
61
+
62
+ [tool.ruff.format]
63
+ docstring-code-format = true
64
+
65
+ [tool.ruff.lint]
66
+ select = ["ALL"]
67
+ ignore = [
68
+ "D1", # allow missing docstrings
69
+ "S101", # allow asserts
70
+ "TCH", # ignore flake8-type-checking
71
+ "FBT", # allow boolean args
72
+ "D203", # "one-blank-line-before-class" conflicting with D211
73
+ "D213", # "multi-line-summary-second-line" conflicting with D212
74
+ "COM812", # flake8-commas "Trailing comma missing"
75
+ "ISC001", # flake8-implicit-str-concat
76
+ "G004", # allow f-strings in logging
77
+ ]
78
+ isort.lines-after-imports = 2
79
+ isort.no-lines-before = ["standard-library", "local-folder"]
80
+
81
+ [tool.pytest.ini_options]
82
+ addopts = "--cov=. --cov-report term-missing"
83
+ asyncio_mode = "auto"
84
+ asyncio_default_fixture_loop_scope = "function"
85
+
86
+ [tool.coverage.run]
87
+ concurrency = ["thread", "greenlet"]
88
+
89
+ [tool.coverage.report]
90
+ exclude_also = [
91
+ "if typing.TYPE_CHECKING:",
92
+ ]