alt-python-pynosqlc-dynamodb 1.0.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alt_python_pynosqlc_dynamodb-1.0.4/.gitignore +30 -0
- alt_python_pynosqlc_dynamodb-1.0.4/PKG-INFO +88 -0
- alt_python_pynosqlc_dynamodb-1.0.4/README.md +64 -0
- alt_python_pynosqlc_dynamodb-1.0.4/pynosqlc/dynamodb/__init__.py +20 -0
- alt_python_pynosqlc_dynamodb-1.0.4/pynosqlc/dynamodb/dynamo_client.py +111 -0
- alt_python_pynosqlc_dynamodb-1.0.4/pynosqlc/dynamodb/dynamo_collection.py +133 -0
- alt_python_pynosqlc_dynamodb-1.0.4/pynosqlc/dynamodb/dynamo_driver.py +86 -0
- alt_python_pynosqlc_dynamodb-1.0.4/pynosqlc/dynamodb/dynamo_filter_translator.py +210 -0
- alt_python_pynosqlc_dynamodb-1.0.4/pyproject.toml +46 -0
- alt_python_pynosqlc_dynamodb-1.0.4/tests/__init__.py +0 -0
- alt_python_pynosqlc_dynamodb-1.0.4/tests/test_compliance.py +62 -0
- alt_python_pynosqlc_dynamodb-1.0.4/tests/test_dynamo_filter_translator.py +310 -0
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
|
|
2
|
+
# ── GSD baseline (auto-generated) ──
|
|
3
|
+
.gsd
|
|
4
|
+
.DS_Store
|
|
5
|
+
Thumbs.db
|
|
6
|
+
*.swp
|
|
7
|
+
*.swo
|
|
8
|
+
*~
|
|
9
|
+
.idea/
|
|
10
|
+
.vscode/
|
|
11
|
+
*.code-workspace
|
|
12
|
+
.env
|
|
13
|
+
.env.*
|
|
14
|
+
!.env.example
|
|
15
|
+
node_modules/
|
|
16
|
+
.next/
|
|
17
|
+
dist/
|
|
18
|
+
build/
|
|
19
|
+
__pycache__/
|
|
20
|
+
*.pyc
|
|
21
|
+
.venv/
|
|
22
|
+
venv/
|
|
23
|
+
target/
|
|
24
|
+
vendor/
|
|
25
|
+
*.log
|
|
26
|
+
coverage/
|
|
27
|
+
.cache/
|
|
28
|
+
tmp/
|
|
29
|
+
|
|
30
|
+
/.bg-shell/
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: alt-python-pynosqlc-dynamodb
|
|
3
|
+
Version: 1.0.4
|
|
4
|
+
Summary: DynamoDB driver for pynosqlc
|
|
5
|
+
Project-URL: Homepage, https://github.com/alt-python/pynosqlc
|
|
6
|
+
Project-URL: Repository, https://github.com/alt-python/pynosqlc
|
|
7
|
+
Project-URL: Documentation, https://github.com/alt-python/pynosqlc#getting-started
|
|
8
|
+
Project-URL: Bug Tracker, https://github.com/alt-python/pynosqlc/issues
|
|
9
|
+
Author: Craig Parravicini, Claude (Anthropic)
|
|
10
|
+
License: MIT
|
|
11
|
+
Keywords: async,aws,database,driver,dynamodb,nosql
|
|
12
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
13
|
+
Classifier: Framework :: AsyncIO
|
|
14
|
+
Classifier: Intended Audience :: Developers
|
|
15
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
16
|
+
Classifier: Programming Language :: Python :: 3
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
18
|
+
Classifier: Topic :: Database
|
|
19
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
20
|
+
Requires-Python: >=3.12
|
|
21
|
+
Requires-Dist: aioboto3>=2.7
|
|
22
|
+
Requires-Dist: alt-python-pynosqlc-core
|
|
23
|
+
Description-Content-Type: text/markdown
|
|
24
|
+
|
|
25
|
+
# pynosqlc-dynamodb
|
|
26
|
+
|
|
27
|
+
DynamoDB driver for [pynosqlc](https://github.com/alt-python/pynosqlc) — connects to Amazon DynamoDB (or DynamoDB Local) via aioboto3.
|
|
28
|
+
|
|
29
|
+
## Install
|
|
30
|
+
|
|
31
|
+
```
|
|
32
|
+
pip install alt-python-pynosqlc-dynamodb
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Requirements
|
|
36
|
+
|
|
37
|
+
- Python 3.12+
|
|
38
|
+
- aioboto3 2.7+
|
|
39
|
+
- AWS credentials configured (`~/.aws/credentials`, environment variables, or IAM role)
|
|
40
|
+
- For local development: [DynamoDB Local](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.html)
|
|
41
|
+
|
|
42
|
+
## Usage
|
|
43
|
+
|
|
44
|
+
### AWS (production)
|
|
45
|
+
|
|
46
|
+
```python
|
|
47
|
+
import asyncio
|
|
48
|
+
from pynosqlc.core import DriverManager, Filter
|
|
49
|
+
import pynosqlc.dynamodb # auto-registers DynamoDriver
|
|
50
|
+
|
|
51
|
+
async def main():
|
|
52
|
+
async with await DriverManager.get_client('pynosqlc:dynamodb:us-east-1') as client:
|
|
53
|
+
col = client.get_collection('orders')
|
|
54
|
+
await col.store('o1', {'item': 'widget', 'qty': 5})
|
|
55
|
+
f = Filter.where('qty').gt(0).build()
|
|
56
|
+
async for doc in await col.find(f):
|
|
57
|
+
print(doc)
|
|
58
|
+
|
|
59
|
+
asyncio.run(main())
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
### DynamoDB Local
|
|
63
|
+
|
|
64
|
+
Pass `endpoint` in the properties dict to point at a local instance:
|
|
65
|
+
|
|
66
|
+
```python
|
|
67
|
+
async with await DriverManager.get_client(
|
|
68
|
+
'pynosqlc:dynamodb:us-east-1',
|
|
69
|
+
properties={'endpoint': 'http://localhost:8000'},
|
|
70
|
+
) as client:
|
|
71
|
+
...
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
## URL scheme
|
|
75
|
+
|
|
76
|
+
```
|
|
77
|
+
pynosqlc:dynamodb:<aws-region>
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
Example: `pynosqlc:dynamodb:us-east-1`
|
|
81
|
+
|
|
82
|
+
Optional properties:
|
|
83
|
+
|
|
84
|
+
| Key | Description |
|
|
85
|
+
|---|---|
|
|
86
|
+
| `endpoint` | Override endpoint URL (e.g. `http://localhost:8000` for DynamoDB Local) |
|
|
87
|
+
| `aws_access_key_id` | AWS access key (falls back to environment / credential chain) |
|
|
88
|
+
| `aws_secret_access_key` | AWS secret key (falls back to environment / credential chain) |
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
# pynosqlc-dynamodb
|
|
2
|
+
|
|
3
|
+
DynamoDB driver for [pynosqlc](https://github.com/alt-python/pynosqlc) — connects to Amazon DynamoDB (or DynamoDB Local) via aioboto3.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```
|
|
8
|
+
pip install alt-python-pynosqlc-dynamodb
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Requirements
|
|
12
|
+
|
|
13
|
+
- Python 3.12+
|
|
14
|
+
- aioboto3 2.7+
|
|
15
|
+
- AWS credentials configured (`~/.aws/credentials`, environment variables, or IAM role)
|
|
16
|
+
- For local development: [DynamoDB Local](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.html)
|
|
17
|
+
|
|
18
|
+
## Usage
|
|
19
|
+
|
|
20
|
+
### AWS (production)
|
|
21
|
+
|
|
22
|
+
```python
|
|
23
|
+
import asyncio
|
|
24
|
+
from pynosqlc.core import DriverManager, Filter
|
|
25
|
+
import pynosqlc.dynamodb # auto-registers DynamoDriver
|
|
26
|
+
|
|
27
|
+
async def main():
|
|
28
|
+
async with await DriverManager.get_client('pynosqlc:dynamodb:us-east-1') as client:
|
|
29
|
+
col = client.get_collection('orders')
|
|
30
|
+
await col.store('o1', {'item': 'widget', 'qty': 5})
|
|
31
|
+
f = Filter.where('qty').gt(0).build()
|
|
32
|
+
async for doc in await col.find(f):
|
|
33
|
+
print(doc)
|
|
34
|
+
|
|
35
|
+
asyncio.run(main())
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
### DynamoDB Local
|
|
39
|
+
|
|
40
|
+
Pass `endpoint` in the properties dict to point at a local instance:
|
|
41
|
+
|
|
42
|
+
```python
|
|
43
|
+
async with await DriverManager.get_client(
|
|
44
|
+
'pynosqlc:dynamodb:us-east-1',
|
|
45
|
+
properties={'endpoint': 'http://localhost:8000'},
|
|
46
|
+
) as client:
|
|
47
|
+
...
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
## URL scheme
|
|
51
|
+
|
|
52
|
+
```
|
|
53
|
+
pynosqlc:dynamodb:<aws-region>
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
Example: `pynosqlc:dynamodb:us-east-1`
|
|
57
|
+
|
|
58
|
+
Optional properties:
|
|
59
|
+
|
|
60
|
+
| Key | Description |
|
|
61
|
+
|---|---|
|
|
62
|
+
| `endpoint` | Override endpoint URL (e.g. `http://localhost:8000` for DynamoDB Local) |
|
|
63
|
+
| `aws_access_key_id` | AWS access key (falls back to environment / credential chain) |
|
|
64
|
+
| `aws_secret_access_key` | AWS secret key (falls back to environment / credential chain) |
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
"""DynamoDB driver for pynosqlc.
|
|
2
|
+
|
|
3
|
+
Importing this package auto-registers the DynamoDriver with DriverManager.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
from pynosqlc.dynamodb.dynamo_filter_translator import DynamoFilterTranslator
|
|
9
|
+
from pynosqlc.dynamodb.dynamo_client import DynamoClient
|
|
10
|
+
from pynosqlc.dynamodb.dynamo_collection import DynamoCollection
|
|
11
|
+
|
|
12
|
+
# Import last — triggers module-level DriverManager.register_driver() call.
|
|
13
|
+
from pynosqlc.dynamodb.dynamo_driver import DynamoDriver # noqa: F401 (side-effect import)
|
|
14
|
+
|
|
15
|
+
__all__ = [
|
|
16
|
+
"DynamoDriver",
|
|
17
|
+
"DynamoClient",
|
|
18
|
+
"DynamoCollection",
|
|
19
|
+
"DynamoFilterTranslator",
|
|
20
|
+
]
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""
|
|
2
|
+
dynamo_client.py — DynamoClient: a pynosqlc Client backed by aioboto3 DynamoDB resource.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
import botocore.exceptions
|
|
8
|
+
|
|
9
|
+
from pynosqlc.core.client import Client
|
|
10
|
+
from pynosqlc.dynamodb.dynamo_collection import DynamoCollection
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class DynamoClient(Client):
|
|
14
|
+
"""A pynosqlc Client backed by an aioboto3 DynamoDB resource.
|
|
15
|
+
|
|
16
|
+
Args:
|
|
17
|
+
url: the original ``pynosqlc:dynamodb:<region>`` URL.
|
|
18
|
+
session: an :class:`aioboto3.Session` instance.
|
|
19
|
+
region: the AWS region name.
|
|
20
|
+
endpoint: optional endpoint URL override (e.g. for DynamoDB Local).
|
|
21
|
+
properties: driver-specific properties dict.
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
def __init__(
|
|
25
|
+
self,
|
|
26
|
+
url: str,
|
|
27
|
+
session,
|
|
28
|
+
region: str,
|
|
29
|
+
endpoint: str | None,
|
|
30
|
+
properties: dict | None = None,
|
|
31
|
+
) -> None:
|
|
32
|
+
super().__init__({"url": url})
|
|
33
|
+
self._session = session
|
|
34
|
+
self._region = region
|
|
35
|
+
self._endpoint = endpoint
|
|
36
|
+
self._properties = properties or {}
|
|
37
|
+
self._resource = None
|
|
38
|
+
self._resource_ctx = None
|
|
39
|
+
self._table_cache: set[str] = set()
|
|
40
|
+
|
|
41
|
+
async def _open(self) -> None:
|
|
42
|
+
"""Enter the aioboto3 DynamoDB resource context manager.
|
|
43
|
+
|
|
44
|
+
Must be called by the driver after constructing this client.
|
|
45
|
+
"""
|
|
46
|
+
self._resource_ctx = self._session.resource(
|
|
47
|
+
"dynamodb",
|
|
48
|
+
region_name=self._region,
|
|
49
|
+
endpoint_url=self._endpoint,
|
|
50
|
+
)
|
|
51
|
+
self._resource = await self._resource_ctx.__aenter__()
|
|
52
|
+
|
|
53
|
+
def _get_collection(self, name: str) -> DynamoCollection:
|
|
54
|
+
"""Create and return a :class:`DynamoCollection` for *name*."""
|
|
55
|
+
return DynamoCollection(self, name)
|
|
56
|
+
|
|
57
|
+
async def _close(self) -> None:
|
|
58
|
+
"""Exit the aioboto3 DynamoDB resource context manager."""
|
|
59
|
+
if self._resource_ctx is not None:
|
|
60
|
+
await self._resource_ctx.__aexit__(None, None, None)
|
|
61
|
+
self._resource_ctx = None
|
|
62
|
+
self._resource = None
|
|
63
|
+
|
|
64
|
+
async def ensure_table(self, name: str) -> None:
|
|
65
|
+
"""Ensure the DynamoDB table *name* exists, creating it if necessary.
|
|
66
|
+
|
|
67
|
+
Uses ``_pk`` (String) as the partition key and ``PAY_PER_REQUEST``
|
|
68
|
+
billing so no capacity planning is required.
|
|
69
|
+
|
|
70
|
+
Idempotent: if the table already exists (verified or from cache),
|
|
71
|
+
this is a no-op.
|
|
72
|
+
|
|
73
|
+
Args:
|
|
74
|
+
name: the DynamoDB table name.
|
|
75
|
+
"""
|
|
76
|
+
if name in self._table_cache:
|
|
77
|
+
return
|
|
78
|
+
|
|
79
|
+
table = await self._resource.Table(name)
|
|
80
|
+
|
|
81
|
+
# Check whether the table exists.
|
|
82
|
+
try:
|
|
83
|
+
await table.load()
|
|
84
|
+
# Table exists — cache it and return.
|
|
85
|
+
self._table_cache.add(name)
|
|
86
|
+
return
|
|
87
|
+
except botocore.exceptions.ClientError as exc:
|
|
88
|
+
code = exc.response["Error"]["Code"]
|
|
89
|
+
if code not in ("ResourceNotFoundException",):
|
|
90
|
+
raise
|
|
91
|
+
|
|
92
|
+
# Table does not exist — create it.
|
|
93
|
+
try:
|
|
94
|
+
await self._resource.create_table(
|
|
95
|
+
TableName=name,
|
|
96
|
+
KeySchema=[{"AttributeName": "_pk", "KeyType": "HASH"}],
|
|
97
|
+
AttributeDefinitions=[
|
|
98
|
+
{"AttributeName": "_pk", "AttributeType": "S"}
|
|
99
|
+
],
|
|
100
|
+
BillingMode="PAY_PER_REQUEST",
|
|
101
|
+
)
|
|
102
|
+
except botocore.exceptions.ClientError as exc:
|
|
103
|
+
code = exc.response["Error"]["Code"]
|
|
104
|
+
if code not in ("ResourceInUseException", "TableAlreadyExistsException"):
|
|
105
|
+
raise
|
|
106
|
+
# Race: another coroutine already created the table — that's fine.
|
|
107
|
+
|
|
108
|
+
# Wait for the table to become ACTIVE.
|
|
109
|
+
waiter_table = await self._resource.Table(name)
|
|
110
|
+
await waiter_table.wait_until_exists()
|
|
111
|
+
self._table_cache.add(name)
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
"""
|
|
2
|
+
dynamo_collection.py — DynamoCollection: a pynosqlc Collection backed by a DynamoDB table.
|
|
3
|
+
|
|
4
|
+
The primary key attribute is ``_pk`` (String). All other document fields are
|
|
5
|
+
stored as top-level DynamoDB item attributes.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import uuid
|
|
11
|
+
|
|
12
|
+
from pynosqlc.core.collection import Collection
|
|
13
|
+
from pynosqlc.core.cursor import Cursor
|
|
14
|
+
from pynosqlc.dynamodb.dynamo_filter_translator import DynamoFilterTranslator
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class DynamoCollection(Collection):
|
|
18
|
+
"""A pynosqlc Collection backed by a DynamoDB table.
|
|
19
|
+
|
|
20
|
+
The DynamoDB table is created on first access with ``_pk`` as the
|
|
21
|
+
partition key. :meth:`DynamoClient.ensure_table` is called at the start
|
|
22
|
+
of every operation — it is a no-op after the first successful call.
|
|
23
|
+
|
|
24
|
+
Args:
|
|
25
|
+
client: the owning :class:`~pynosqlc.dynamodb.DynamoClient`.
|
|
26
|
+
name: the table / collection name.
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
def __init__(self, client, name: str) -> None:
|
|
30
|
+
super().__init__(client, name)
|
|
31
|
+
|
|
32
|
+
async def _get(self, key: str) -> dict | None:
|
|
33
|
+
"""Retrieve a document by its ``_pk``."""
|
|
34
|
+
await self._client.ensure_table(self._name)
|
|
35
|
+
table = await self._client._resource.Table(self._name)
|
|
36
|
+
|
|
37
|
+
resp = await table.get_item(Key={"_pk": key})
|
|
38
|
+
item = resp.get("Item")
|
|
39
|
+
if item is None:
|
|
40
|
+
return None
|
|
41
|
+
return {k: v for k, v in item.items() if k != "_pk"}
|
|
42
|
+
|
|
43
|
+
async def _store(self, key: str, doc: dict) -> None:
|
|
44
|
+
"""Upsert a document, setting ``_pk = key``."""
|
|
45
|
+
await self._client.ensure_table(self._name)
|
|
46
|
+
table = await self._client._resource.Table(self._name)
|
|
47
|
+
|
|
48
|
+
await table.put_item(Item={**doc, "_pk": key})
|
|
49
|
+
|
|
50
|
+
async def _delete(self, key: str) -> None:
|
|
51
|
+
"""Delete the document at ``_pk = key``."""
|
|
52
|
+
await self._client.ensure_table(self._name)
|
|
53
|
+
table = await self._client._resource.Table(self._name)
|
|
54
|
+
|
|
55
|
+
await table.delete_item(Key={"_pk": key})
|
|
56
|
+
|
|
57
|
+
async def _insert(self, doc: dict) -> str:
|
|
58
|
+
"""Insert a document with a generated UUID ``_pk``; return the id."""
|
|
59
|
+
await self._client.ensure_table(self._name)
|
|
60
|
+
table = await self._client._resource.Table(self._name)
|
|
61
|
+
|
|
62
|
+
id_ = str(uuid.uuid4())
|
|
63
|
+
await table.put_item(Item={**doc, "_pk": id_})
|
|
64
|
+
return id_
|
|
65
|
+
|
|
66
|
+
async def _update(self, key: str, patch: dict) -> None:
|
|
67
|
+
"""Patch the document at ``_pk = key`` using a SET expression.
|
|
68
|
+
|
|
69
|
+
Only provided fields are updated; others are preserved.
|
|
70
|
+
``_pk`` is never patched even if present in *patch*.
|
|
71
|
+
"""
|
|
72
|
+
await self._client.ensure_table(self._name)
|
|
73
|
+
table = await self._client._resource.Table(self._name)
|
|
74
|
+
|
|
75
|
+
# Build SET expression: skip _pk to avoid overwriting the partition key.
|
|
76
|
+
fields = [(k, v) for k, v in patch.items() if k != "_pk"]
|
|
77
|
+
if not fields:
|
|
78
|
+
return
|
|
79
|
+
|
|
80
|
+
set_parts = []
|
|
81
|
+
expr_names: dict[str, str] = {}
|
|
82
|
+
expr_values: dict[str, object] = {}
|
|
83
|
+
|
|
84
|
+
for i, (field, value) in enumerate(fields):
|
|
85
|
+
name_alias = f"#attr{i}"
|
|
86
|
+
value_alias = f":val{i}"
|
|
87
|
+
set_parts.append(f"{name_alias} = {value_alias}")
|
|
88
|
+
expr_names[name_alias] = field
|
|
89
|
+
expr_values[value_alias] = value
|
|
90
|
+
|
|
91
|
+
update_expr = "SET " + ", ".join(set_parts)
|
|
92
|
+
|
|
93
|
+
await table.update_item(
|
|
94
|
+
Key={"_pk": key},
|
|
95
|
+
UpdateExpression=update_expr,
|
|
96
|
+
ExpressionAttributeNames=expr_names,
|
|
97
|
+
ExpressionAttributeValues=expr_values,
|
|
98
|
+
)
|
|
99
|
+
|
|
100
|
+
async def _find(self, ast: dict) -> Cursor:
|
|
101
|
+
"""Find documents matching the filter AST.
|
|
102
|
+
|
|
103
|
+
Translates the AST to a DynamoDB FilterExpression, performs a full
|
|
104
|
+
table scan with automatic pagination, strips ``_pk`` from each item,
|
|
105
|
+
and wraps results in a :class:`~pynosqlc.core.Cursor`.
|
|
106
|
+
"""
|
|
107
|
+
await self._client.ensure_table(self._name)
|
|
108
|
+
table = await self._client._resource.Table(self._name)
|
|
109
|
+
|
|
110
|
+
filter_expr, attr_names, attr_values = DynamoFilterTranslator.translate(ast)
|
|
111
|
+
|
|
112
|
+
scan_kwargs: dict = {}
|
|
113
|
+
if filter_expr is not None:
|
|
114
|
+
scan_kwargs["FilterExpression"] = filter_expr
|
|
115
|
+
scan_kwargs["ExpressionAttributeNames"] = attr_names
|
|
116
|
+
if attr_values:
|
|
117
|
+
scan_kwargs["ExpressionAttributeValues"] = attr_values
|
|
118
|
+
|
|
119
|
+
# Paginated scan.
|
|
120
|
+
items: list[dict] = []
|
|
121
|
+
resp = await table.scan(**scan_kwargs)
|
|
122
|
+
items.extend(resp.get("Items", []))
|
|
123
|
+
|
|
124
|
+
while "LastEvaluatedKey" in resp:
|
|
125
|
+
resp = await table.scan(
|
|
126
|
+
ExclusiveStartKey=resp["LastEvaluatedKey"],
|
|
127
|
+
**scan_kwargs,
|
|
128
|
+
)
|
|
129
|
+
items.extend(resp.get("Items", []))
|
|
130
|
+
|
|
131
|
+
# Strip the internal _pk field before returning to callers.
|
|
132
|
+
docs = [{k: v for k, v in item.items() if k != "_pk"} for item in items]
|
|
133
|
+
return Cursor(docs)
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
"""
|
|
2
|
+
dynamo_driver.py — DynamoDriver: connects to DynamoDB via aioboto3.
|
|
3
|
+
|
|
4
|
+
URL scheme: pynosqlc:dynamodb:<region>
|
|
5
|
+
e.g. pynosqlc:dynamodb:us-east-1
|
|
6
|
+
|
|
7
|
+
Auto-registers with DriverManager on import.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
import os
|
|
13
|
+
|
|
14
|
+
import aioboto3
|
|
15
|
+
|
|
16
|
+
from pynosqlc.core.driver import Driver
|
|
17
|
+
from pynosqlc.core.driver_manager import DriverManager
|
|
18
|
+
from pynosqlc.dynamodb.dynamo_client import DynamoClient
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class DynamoDriver(Driver):
|
|
22
|
+
"""Driver that creates :class:`DynamoClient` instances.
|
|
23
|
+
|
|
24
|
+
URL prefix: ``pynosqlc:dynamodb:``
|
|
25
|
+
"""
|
|
26
|
+
|
|
27
|
+
URL_PREFIX: str = "pynosqlc:dynamodb:"
|
|
28
|
+
|
|
29
|
+
def accepts_url(self, url: str) -> bool:
|
|
30
|
+
"""Return ``True`` for ``'pynosqlc:dynamodb:'`` URLs."""
|
|
31
|
+
return isinstance(url, str) and url.startswith(self.URL_PREFIX)
|
|
32
|
+
|
|
33
|
+
async def connect(
|
|
34
|
+
self,
|
|
35
|
+
url: str,
|
|
36
|
+
properties: dict | None = None,
|
|
37
|
+
) -> DynamoClient:
|
|
38
|
+
"""Create and return an open :class:`DynamoClient`.
|
|
39
|
+
|
|
40
|
+
Args:
|
|
41
|
+
url: ``pynosqlc:dynamodb:<region>``
|
|
42
|
+
properties: optional dict; supports:
|
|
43
|
+
- ``endpoint``: override endpoint URL (e.g. for DynamoDB Local)
|
|
44
|
+
- ``aws_access_key_id``: AWS access key (optional)
|
|
45
|
+
- ``aws_secret_access_key``: AWS secret key (optional)
|
|
46
|
+
|
|
47
|
+
Returns:
|
|
48
|
+
An open :class:`DynamoClient`.
|
|
49
|
+
"""
|
|
50
|
+
props = properties or {}
|
|
51
|
+
|
|
52
|
+
region = url[len(self.URL_PREFIX):]
|
|
53
|
+
if not region:
|
|
54
|
+
region = "us-east-1"
|
|
55
|
+
|
|
56
|
+
endpoint = props.get("endpoint") or os.environ.get("DYNAMODB_ENDPOINT")
|
|
57
|
+
|
|
58
|
+
session_kwargs: dict = {}
|
|
59
|
+
if endpoint:
|
|
60
|
+
# When using a local endpoint (DynamoDB Local / LocalStack) without
|
|
61
|
+
# real credentials, supply dummy values so boto3 does not raise a
|
|
62
|
+
# NoCredentialsError.
|
|
63
|
+
has_real_creds = (
|
|
64
|
+
os.environ.get("AWS_ACCESS_KEY_ID")
|
|
65
|
+
or os.environ.get("AWS_PROFILE")
|
|
66
|
+
or os.environ.get("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI")
|
|
67
|
+
or os.environ.get("AWS_WEB_IDENTITY_TOKEN_FILE")
|
|
68
|
+
)
|
|
69
|
+
if not has_real_creds:
|
|
70
|
+
session_kwargs["aws_access_key_id"] = props.get(
|
|
71
|
+
"aws_access_key_id", "dummy"
|
|
72
|
+
)
|
|
73
|
+
session_kwargs["aws_secret_access_key"] = props.get(
|
|
74
|
+
"aws_secret_access_key", "dummy"
|
|
75
|
+
)
|
|
76
|
+
|
|
77
|
+
session = aioboto3.Session(**session_kwargs)
|
|
78
|
+
|
|
79
|
+
client = DynamoClient(url, session, region, endpoint, props)
|
|
80
|
+
await client._open()
|
|
81
|
+
return client
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
# Auto-register on import — a single shared instance is sufficient.
|
|
85
|
+
_driver = DynamoDriver()
|
|
86
|
+
DriverManager.register_driver(_driver)
|
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
"""
|
|
2
|
+
dynamo_filter_translator.py — Translates a pynosqlc Filter AST to a DynamoDB
|
|
3
|
+
FilterExpression triple.
|
|
4
|
+
|
|
5
|
+
Returns
|
|
6
|
+
-------
|
|
7
|
+
tuple[str | None, dict, dict]
|
|
8
|
+
(filter_expression, expression_attribute_names, expression_attribute_values)
|
|
9
|
+
|
|
10
|
+
When the AST is None or empty (no conditions), returns ``(None, {}, {})``.
|
|
11
|
+
|
|
12
|
+
Design
|
|
13
|
+
------
|
|
14
|
+
A fresh ``_TranslatorState`` is created for each ``translate()`` call. The
|
|
15
|
+
state carries monotonically-increasing counters for field-name aliases
|
|
16
|
+
(``#n0``, ``#n1``, …) and value aliases (``:v0``, ``:v1``, …) so that
|
|
17
|
+
compound filters across multiple fields never produce collisions.
|
|
18
|
+
|
|
19
|
+
All field references go through ``ExpressionAttributeNames`` to avoid
|
|
20
|
+
DynamoDB reserved-word conflicts.
|
|
21
|
+
|
|
22
|
+
Supported operators
|
|
23
|
+
-------------------
|
|
24
|
+
eq, ne, gt, gte, lt, lte, contains, in, nin, exists
|
|
25
|
+
|
|
26
|
+
Composite node types
|
|
27
|
+
--------------------
|
|
28
|
+
and, or, not
|
|
29
|
+
"""
|
|
30
|
+
|
|
31
|
+
from __future__ import annotations
|
|
32
|
+
|
|
33
|
+
from typing import Any
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class DynamoFilterTranslator:
|
|
37
|
+
"""Stateless translator from pynosqlc Filter AST to DynamoDB expression triple."""
|
|
38
|
+
|
|
39
|
+
@staticmethod
|
|
40
|
+
def translate(
|
|
41
|
+
ast: dict | None,
|
|
42
|
+
) -> tuple[str | None, dict, dict]:
|
|
43
|
+
"""Translate a Filter AST node to a DynamoDB expression triple.
|
|
44
|
+
|
|
45
|
+
Args:
|
|
46
|
+
ast: a Filter AST node, or ``None`` / empty dict (matches all).
|
|
47
|
+
|
|
48
|
+
Returns:
|
|
49
|
+
A tuple of ``(filter_expression, expression_attribute_names,
|
|
50
|
+
expression_attribute_values)``. When the AST is falsy or has no
|
|
51
|
+
conditions, returns ``(None, {}, {})``.
|
|
52
|
+
|
|
53
|
+
Raises:
|
|
54
|
+
ValueError: if an unknown AST node type or operator is encountered.
|
|
55
|
+
"""
|
|
56
|
+
if not ast:
|
|
57
|
+
return (None, {}, {})
|
|
58
|
+
|
|
59
|
+
# An 'and' node with an empty conditions list also means "match all"
|
|
60
|
+
if ast.get("type") in ("and", "or") and not ast.get("conditions"):
|
|
61
|
+
return (None, {}, {})
|
|
62
|
+
|
|
63
|
+
state = _TranslatorState()
|
|
64
|
+
expr = state._node(ast)
|
|
65
|
+
if expr is None:
|
|
66
|
+
return (None, {}, {})
|
|
67
|
+
return (expr, state.attr_names, state.attr_values)
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
# ---------------------------------------------------------------------------
|
|
71
|
+
# Internal stateful translator
|
|
72
|
+
# ---------------------------------------------------------------------------
|
|
73
|
+
|
|
74
|
+
class _TranslatorState:
|
|
75
|
+
"""Carries mutable translation state for a single translate() call."""
|
|
76
|
+
|
|
77
|
+
def __init__(self) -> None:
|
|
78
|
+
self.name_idx: int = 0
|
|
79
|
+
self.value_idx: int = 0
|
|
80
|
+
self.attr_names: dict[str, str] = {}
|
|
81
|
+
self.attr_values: dict[str, Any] = {}
|
|
82
|
+
|
|
83
|
+
# ── Counter helpers ──────────────────────────────────────────────────────
|
|
84
|
+
|
|
85
|
+
def _field_alias(self, field: str) -> str:
|
|
86
|
+
"""Allocate the next #nX alias for *field* and record the mapping."""
|
|
87
|
+
alias = f"#n{self.name_idx}"
|
|
88
|
+
self.attr_names[alias] = field
|
|
89
|
+
self.name_idx += 1
|
|
90
|
+
return alias
|
|
91
|
+
|
|
92
|
+
def _value_alias(self, value: Any) -> str:
|
|
93
|
+
"""Allocate the next :vX alias for *value* and record the mapping."""
|
|
94
|
+
alias = f":v{self.value_idx}"
|
|
95
|
+
self.attr_values[alias] = value
|
|
96
|
+
self.value_idx += 1
|
|
97
|
+
return alias
|
|
98
|
+
|
|
99
|
+
# ── Node dispatcher ──────────────────────────────────────────────────────
|
|
100
|
+
|
|
101
|
+
def _node(self, ast: dict) -> str | None:
|
|
102
|
+
"""Recursively translate an AST node to an expression string."""
|
|
103
|
+
node_type = ast.get("type")
|
|
104
|
+
|
|
105
|
+
if node_type == "and":
|
|
106
|
+
return self._and_node(ast)
|
|
107
|
+
|
|
108
|
+
if node_type == "or":
|
|
109
|
+
return self._or_node(ast)
|
|
110
|
+
|
|
111
|
+
if node_type == "not":
|
|
112
|
+
return self._not_node(ast)
|
|
113
|
+
|
|
114
|
+
if node_type == "condition":
|
|
115
|
+
return self._condition(ast)
|
|
116
|
+
|
|
117
|
+
raise ValueError(f"Unknown filter AST node type: {node_type!r}")
|
|
118
|
+
|
|
119
|
+
# ── Composite nodes ──────────────────────────────────────────────────────
|
|
120
|
+
|
|
121
|
+
def _and_node(self, ast: dict) -> str | None:
|
|
122
|
+
conditions = ast.get("conditions") or []
|
|
123
|
+
if not conditions:
|
|
124
|
+
return None
|
|
125
|
+
parts = [self._node(c) for c in conditions]
|
|
126
|
+
parts = [p for p in parts if p is not None]
|
|
127
|
+
if not parts:
|
|
128
|
+
return None
|
|
129
|
+
if len(parts) == 1:
|
|
130
|
+
return parts[0]
|
|
131
|
+
joined = " AND ".join(f"({p})" for p in parts)
|
|
132
|
+
return joined
|
|
133
|
+
|
|
134
|
+
def _or_node(self, ast: dict) -> str | None:
|
|
135
|
+
conditions = ast.get("conditions") or []
|
|
136
|
+
if not conditions:
|
|
137
|
+
return None
|
|
138
|
+
parts = [self._node(c) for c in conditions]
|
|
139
|
+
parts = [p for p in parts if p is not None]
|
|
140
|
+
if not parts:
|
|
141
|
+
return None
|
|
142
|
+
if len(parts) == 1:
|
|
143
|
+
return parts[0]
|
|
144
|
+
joined = " OR ".join(f"({p})" for p in parts)
|
|
145
|
+
return joined
|
|
146
|
+
|
|
147
|
+
def _not_node(self, ast: dict) -> str:
|
|
148
|
+
inner = self._node(ast["condition"])
|
|
149
|
+
return f"NOT ({inner})"
|
|
150
|
+
|
|
151
|
+
# ── Leaf condition ───────────────────────────────────────────────────────
|
|
152
|
+
|
|
153
|
+
def _condition(self, node: dict) -> str:
|
|
154
|
+
field: str = node["field"]
|
|
155
|
+
op: str = node["op"]
|
|
156
|
+
value: Any = node.get("value")
|
|
157
|
+
|
|
158
|
+
na = self._field_alias(field)
|
|
159
|
+
|
|
160
|
+
if op == "eq":
|
|
161
|
+
va = self._value_alias(value)
|
|
162
|
+
return f"{na} = {va}"
|
|
163
|
+
|
|
164
|
+
if op == "ne":
|
|
165
|
+
va = self._value_alias(value)
|
|
166
|
+
return f"{na} <> {va}"
|
|
167
|
+
|
|
168
|
+
if op == "gt":
|
|
169
|
+
va = self._value_alias(value)
|
|
170
|
+
return f"{na} > {va}"
|
|
171
|
+
|
|
172
|
+
if op == "gte":
|
|
173
|
+
va = self._value_alias(value)
|
|
174
|
+
return f"{na} >= {va}"
|
|
175
|
+
|
|
176
|
+
if op == "lt":
|
|
177
|
+
va = self._value_alias(value)
|
|
178
|
+
return f"{na} < {va}"
|
|
179
|
+
|
|
180
|
+
if op == "lte":
|
|
181
|
+
va = self._value_alias(value)
|
|
182
|
+
return f"{na} <= {va}"
|
|
183
|
+
|
|
184
|
+
if op == "contains":
|
|
185
|
+
va = self._value_alias(value)
|
|
186
|
+
return f"contains({na}, {va})"
|
|
187
|
+
|
|
188
|
+
if op == "exists":
|
|
189
|
+
if value:
|
|
190
|
+
return f"attribute_exists({na})"
|
|
191
|
+
else:
|
|
192
|
+
return f"attribute_not_exists({na})"
|
|
193
|
+
|
|
194
|
+
if op == "in":
|
|
195
|
+
# One OR clause per value; the field alias is shared
|
|
196
|
+
clauses = []
|
|
197
|
+
for v in value:
|
|
198
|
+
va = self._value_alias(v)
|
|
199
|
+
clauses.append(f"{na} = {va}")
|
|
200
|
+
return "(" + " OR ".join(clauses) + ")"
|
|
201
|
+
|
|
202
|
+
if op == "nin":
|
|
203
|
+
# One AND clause per value; the field alias is shared
|
|
204
|
+
clauses = []
|
|
205
|
+
for v in value:
|
|
206
|
+
va = self._value_alias(v)
|
|
207
|
+
clauses.append(f"{na} <> {va}")
|
|
208
|
+
return "(" + " AND ".join(clauses) + ")"
|
|
209
|
+
|
|
210
|
+
raise ValueError(f"Unknown filter operator: {op!r}")
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "alt-python-pynosqlc-dynamodb"
|
|
3
|
+
version = "1.0.4"
|
|
4
|
+
description = "DynamoDB driver for pynosqlc"
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
requires-python = ">=3.12"
|
|
7
|
+
dependencies = [
|
|
8
|
+
"alt-python-pynosqlc-core",
|
|
9
|
+
"aioboto3>=2.7",
|
|
10
|
+
]
|
|
11
|
+
authors = [
|
|
12
|
+
{name = "Craig Parravicini"},
|
|
13
|
+
{name = "Claude (Anthropic)"},
|
|
14
|
+
]
|
|
15
|
+
license = {text = "MIT"}
|
|
16
|
+
keywords = ["nosql", "database", "async", "dynamodb", "aws", "driver"]
|
|
17
|
+
classifiers = [
|
|
18
|
+
"Development Status :: 5 - Production/Stable",
|
|
19
|
+
"Framework :: AsyncIO",
|
|
20
|
+
"Intended Audience :: Developers",
|
|
21
|
+
"License :: OSI Approved :: MIT License",
|
|
22
|
+
"Programming Language :: Python :: 3",
|
|
23
|
+
"Programming Language :: Python :: 3.12",
|
|
24
|
+
"Topic :: Database",
|
|
25
|
+
"Topic :: Software Development :: Libraries :: Python Modules",
|
|
26
|
+
]
|
|
27
|
+
|
|
28
|
+
[project.urls]
|
|
29
|
+
Homepage = "https://github.com/alt-python/pynosqlc"
|
|
30
|
+
Repository = "https://github.com/alt-python/pynosqlc"
|
|
31
|
+
Documentation = "https://github.com/alt-python/pynosqlc#getting-started"
|
|
32
|
+
"Bug Tracker" = "https://github.com/alt-python/pynosqlc/issues"
|
|
33
|
+
|
|
34
|
+
[build-system]
|
|
35
|
+
requires = ["hatchling"]
|
|
36
|
+
build-backend = "hatchling.build"
|
|
37
|
+
|
|
38
|
+
[tool.hatch.build.targets.wheel]
|
|
39
|
+
packages = ["pynosqlc"]
|
|
40
|
+
|
|
41
|
+
[tool.uv.sources]
|
|
42
|
+
alt-python-pynosqlc-core = { workspace = true }
|
|
43
|
+
|
|
44
|
+
[tool.pytest.ini_options]
|
|
45
|
+
testpaths = ["tests"]
|
|
46
|
+
asyncio_mode = "auto"
|
|
File without changes
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"""
|
|
2
|
+
test_compliance.py — DynamoDB driver compliance tests.
|
|
3
|
+
|
|
4
|
+
Wires the shared pynosqlc.core compliance suite into the dynamodb package.
|
|
5
|
+
Each test run gets a fresh DynamoClient connected to a real DynamoDB instance.
|
|
6
|
+
|
|
7
|
+
Set DYNAMODB_ENDPOINT to override the default local endpoint (default: http://localhost:8000).
|
|
8
|
+
Set DYNAMODB_REGION to override the default region (default: us-east-1).
|
|
9
|
+
Tests are skipped automatically if DynamoDB Local is not reachable.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
import os
|
|
15
|
+
|
|
16
|
+
import pytest
|
|
17
|
+
|
|
18
|
+
from pynosqlc.core import DriverManager
|
|
19
|
+
from pynosqlc.core.testing import run_compliance
|
|
20
|
+
import pynosqlc.dynamodb # noqa: F401 — registers DynamoDriver on import
|
|
21
|
+
from pynosqlc.dynamodb.dynamo_driver import _driver
|
|
22
|
+
|
|
23
|
+
# Connection settings — overridable via the environment for CI/local setups.
DYNAMODB_ENDPOINT = os.getenv("DYNAMODB_ENDPOINT", "http://localhost:8000")
DYNAMODB_REGION = os.getenv("DYNAMODB_REGION", "us-east-1")

# pynosqlc connection URL: scheme, driver name, then the region.
DYNAMODB_URL = "pynosqlc:dynamodb:" + DYNAMODB_REGION
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
async def _factory():
    """Return a fresh, open DynamoClient for each test class fixture.

    Clears and re-registers the driver, connects to DynamoDB, deletes any
    leftover compliance tables so ensure_table recreates them cleanly, and
    returns the client.

    Skips the test if DynamoDB is not reachable.
    """
    # Reset the global registry so only this driver is registered, regardless
    # of what earlier tests (or imports) may have left behind.
    DriverManager.clear()
    DriverManager.register_driver(_driver)

    try:
        client = await DriverManager.get_client(
            DYNAMODB_URL,
            {"endpoint": DYNAMODB_ENDPOINT},
        )
    # Broad catch is deliberate here: ANY connection/setup failure means the
    # local DynamoDB is unavailable, and the suite should skip rather than fail.
    except Exception as e:
        pytest.skip(f"DynamoDB not available: {e}")

    # Delete compliance tables from any prior run so tests are isolated.
    # Deleting the table and removing it from the cache forces ensure_table
    # to recreate it fresh on first use — simpler than clearing items.
    # NOTE(review): this reaches into DynamoClient internals (_resource,
    # _table_cache) — acceptable in tests, but coupled to the client's layout.
    for table_name in ("compliance_kv", "compliance_doc", "compliance_find"):
        try:
            # assumes aioboto3-style async resource: Table() and delete()
            # are both awaitable — TODO confirm against dynamo_client.py
            table = await client._resource.Table(table_name)
            await table.delete()
            client._table_cache.discard(table_name)
        except Exception:
            pass  # Table may not exist yet; that's fine.

    return client


# Generates the shared compliance test suite at import time, bound to the
# factory above; pytest collects the generated tests from this module.
run_compliance(_factory)
|
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Unit tests for DynamoFilterTranslator.
|
|
3
|
+
|
|
4
|
+
Covers:
|
|
5
|
+
- All 10 operators: eq, ne, gt, gte, lt, lte, contains, in, nin, exists
|
|
6
|
+
- and-node with 1 condition unwraps (no wrapping parens)
|
|
7
|
+
- and-node with 2 conditions → "(expr1) AND (expr2)"
|
|
8
|
+
- or-node with 1 condition unwraps
|
|
9
|
+
- or-node with 2 conditions → "(expr1) OR (expr2)"
|
|
10
|
+
- not-node → "NOT (expr)"
|
|
11
|
+
- None ast → (None, {}, {})
|
|
12
|
+
- empty dict ast → (None, {}, {})
|
|
13
|
+
- empty and/or conditions → (None, {}, {})
|
|
14
|
+
- in_ multi-value expansion
|
|
15
|
+
- nin multi-value expansion
|
|
16
|
+
- exists(True) / exists(False) add no entry to attr_values
|
|
17
|
+
- unknown op raises ValueError
|
|
18
|
+
- unknown node type raises ValueError
|
|
19
|
+
- compound filter uses distinct #nX and :vX aliases with no collisions
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
import pytest
|
|
23
|
+
|
|
24
|
+
from pynosqlc.dynamodb.dynamo_filter_translator import DynamoFilterTranslator
|
|
25
|
+
|
|
26
|
+
# Bind the translator entry point once so tests read as plain `translate(ast)`.
translate = DynamoFilterTranslator.translate
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
# ---------------------------------------------------------------------------
|
|
30
|
+
# Helpers
|
|
31
|
+
# ---------------------------------------------------------------------------
|
|
32
|
+
|
|
33
|
+
# Sentinel so callers can distinguish "no value given" from an explicit None.
_MISSING = object()


def cond(field, op, value=_MISSING):
    """Convenience factory for condition AST nodes.

    Omitting *value* omits the ``"value"`` key (for value-less shapes); any
    explicitly passed value — including ``None`` and ``False`` — is kept.
    The original ``value is not None`` check made it impossible to build a
    condition whose value is literally ``None`` (e.g. ``eq None``); the
    sentinel fixes that while remaining backward-compatible for all callers.
    """
    node = {"type": "condition", "field": field, "op": op}
    if value is not _MISSING:
        node["value"] = value
    return node
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
# ---------------------------------------------------------------------------
|
|
42
|
+
# None / empty input
|
|
43
|
+
# ---------------------------------------------------------------------------
|
|
44
|
+
|
|
45
|
+
class TestEmptyInput:
    """A missing or vacuous filter AST yields no expression and empty maps."""

    def test_none_returns_triple_none(self):
        result = translate(None)
        assert result == (None, {}, {})

    def test_empty_dict_returns_triple_none(self):
        result = translate({})
        assert result == (None, {}, {})

    def test_empty_and_conditions_returns_triple_none(self):
        ast = {"type": "and", "conditions": []}
        assert translate(ast) == (None, {}, {})

    def test_empty_or_conditions_returns_triple_none(self):
        ast = {"type": "or", "conditions": []}
        assert translate(ast) == (None, {}, {})
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
# ---------------------------------------------------------------------------
|
|
60
|
+
# All 10 operators on a regular field
|
|
61
|
+
# ---------------------------------------------------------------------------
|
|
62
|
+
|
|
63
|
+
class TestOperators:
    """Every supported operator renders one field alias plus its value
    alias(es), with the expected DynamoDB expression shape."""

    def test_eq(self):
        expression, name_map, value_map = translate(cond("name", "eq", "Alice"))
        assert expression == "#n0 = :v0"
        assert name_map == {"#n0": "name"}
        assert value_map == {":v0": "Alice"}

    def test_ne(self):
        expression, name_map, value_map = translate(cond("status", "ne", "inactive"))
        assert expression == "#n0 <> :v0"
        assert name_map == {"#n0": "status"}
        assert value_map == {":v0": "inactive"}

    def test_gt(self):
        expression, name_map, value_map = translate(cond("age", "gt", 18))
        assert expression == "#n0 > :v0"
        assert name_map == {"#n0": "age"}
        assert value_map == {":v0": 18}

    def test_gte(self):
        expression, name_map, value_map = translate(cond("score", "gte", 90))
        assert expression == "#n0 >= :v0"
        assert name_map == {"#n0": "score"}
        assert value_map == {":v0": 90}

    def test_lt(self):
        expression, name_map, value_map = translate(cond("price", "lt", 100))
        assert expression == "#n0 < :v0"
        assert name_map == {"#n0": "price"}
        assert value_map == {":v0": 100}

    def test_lte(self):
        expression, name_map, value_map = translate(cond("rank", "lte", 5))
        assert expression == "#n0 <= :v0"
        assert name_map == {"#n0": "rank"}
        assert value_map == {":v0": 5}

    def test_contains(self):
        expression, name_map, value_map = translate(cond("tags", "contains", "python"))
        assert expression == "contains(#n0, :v0)"
        assert name_map == {"#n0": "tags"}
        assert value_map == {":v0": "python"}

    def test_exists_true(self):
        expression, name_map, value_map = translate(cond("email", "exists", True))
        assert expression == "attribute_exists(#n0)"
        assert name_map == {"#n0": "email"}
        assert value_map == {}

    def test_exists_false(self):
        expression, name_map, value_map = translate(cond("deleted_at", "exists", False))
        assert expression == "attribute_not_exists(#n0)"
        assert name_map == {"#n0": "deleted_at"}
        assert value_map == {}

    def test_in(self):
        expression, name_map, value_map = translate(cond("category", "in", ["a", "b", "c"]))
        assert expression == "(#n0 = :v0 OR #n0 = :v1 OR #n0 = :v2)"
        assert name_map == {"#n0": "category"}
        assert value_map == {":v0": "a", ":v1": "b", ":v2": "c"}

    def test_nin(self):
        expression, name_map, value_map = translate(cond("role", "nin", ["admin", "root"]))
        assert expression == "(#n0 <> :v0 AND #n0 <> :v1)"
        assert name_map == {"#n0": "role"}
        assert value_map == {":v0": "admin", ":v1": "root"}
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
# ---------------------------------------------------------------------------
|
|
132
|
+
# in / nin expansion
|
|
133
|
+
# ---------------------------------------------------------------------------
|
|
134
|
+
|
|
135
|
+
class TestInNinExpansion:
    """in/nin expand one value alias per list element, sharing one field alias."""

    def test_in_single_value(self):
        expression, _, value_map = translate(cond("x", "in", ["only"]))
        assert value_map == {":v0": "only"}
        assert expression == "(#n0 = :v0)"

    def test_in_multi_value_aliases_present(self):
        expression, _, value_map = translate(cond("x", "in", [1, 2, 3, 4]))
        assert sorted(value_map) == [":v0", ":v1", ":v2", ":v3"]
        assert list(value_map.values()) == [1, 2, 3, 4]
        # Every clause references the same field alias.
        for alias in (":v0", ":v3"):
            assert f"#n0 = {alias}" in expression

    def test_nin_produces_and_chain(self):
        expression, _, _ = translate(cond("status", "nin", ["a", "b"]))
        assert expression == "(#n0 <> :v0 AND #n0 <> :v1)"
        assert "AND" in expression

    def test_nin_multi_value(self):
        expression, _, value_map = translate(cond("x", "nin", [10, 20, 30]))
        assert expression == "(#n0 <> :v0 AND #n0 <> :v1 AND #n0 <> :v2)"
        assert value_map == {":v0": 10, ":v1": 20, ":v2": 30}
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
# ---------------------------------------------------------------------------
|
|
161
|
+
# exists does not add to attr_values
|
|
162
|
+
# ---------------------------------------------------------------------------
|
|
163
|
+
|
|
164
|
+
class TestExistsNoValue:
    """exists never consumes a :vX alias, whichever polarity is requested."""

    def test_exists_true_no_value_alias(self):
        _expression, _name_map, value_map = translate(cond("field", "exists", True))
        assert value_map == {}

    def test_exists_false_no_value_alias(self):
        _expression, _name_map, value_map = translate(cond("field", "exists", False))
        assert value_map == {}
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
# ---------------------------------------------------------------------------
|
|
175
|
+
# and-node
|
|
176
|
+
# ---------------------------------------------------------------------------
|
|
177
|
+
|
|
178
|
+
class TestAndNode:
    """and-nodes join children with AND; a lone child is left unwrapped."""

    def test_single_condition_unwraps(self):
        ast = {"type": "and", "conditions": [cond("x", "eq", 1)]}
        expression, name_map, value_map = translate(ast)
        assert (expression, name_map, value_map) == (
            "#n0 = :v0",
            {"#n0": "x"},
            {":v0": 1},
        )

    def test_two_conditions_produces_and(self):
        ast = {
            "type": "and",
            "conditions": [cond("x", "eq", 1), cond("y", "gt", 0)],
        }
        expression, name_map, value_map = translate(ast)
        assert expression == "(#n0 = :v0) AND (#n1 > :v1)"
        assert name_map == {"#n0": "x", "#n1": "y"}
        assert value_map == {":v0": 1, ":v1": 0}

    def test_empty_conditions_returns_none(self):
        assert translate({"type": "and", "conditions": []}) == (None, {}, {})
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
# ---------------------------------------------------------------------------
|
|
202
|
+
# or-node
|
|
203
|
+
# ---------------------------------------------------------------------------
|
|
204
|
+
|
|
205
|
+
class TestOrNode:
    """or-nodes join children with OR; a lone child is left unwrapped."""

    def test_single_condition_unwraps(self):
        ast = {"type": "or", "conditions": [cond("x", "eq", 1)]}
        expression, _, _ = translate(ast)
        assert expression == "#n0 = :v0"

    def test_two_conditions_produces_or(self):
        ast = {
            "type": "or",
            "conditions": [cond("a", "lt", 5), cond("b", "gte", 10)],
        }
        expression, name_map, value_map = translate(ast)
        assert expression == "(#n0 < :v0) OR (#n1 >= :v1)"
        assert name_map == {"#n0": "a", "#n1": "b"}
        assert value_map == {":v0": 5, ":v1": 10}

    def test_empty_conditions_returns_none(self):
        assert translate({"type": "or", "conditions": []}) == (None, {}, {})
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
# ---------------------------------------------------------------------------
|
|
227
|
+
# not-node
|
|
228
|
+
# ---------------------------------------------------------------------------
|
|
229
|
+
|
|
230
|
+
class TestNotNode:
    """not-nodes wrap the inner expression in NOT (...)."""

    def test_not_wraps_with_not(self):
        ast = {"type": "not", "condition": cond("active", "eq", True)}
        expression, name_map, value_map = translate(ast)
        assert expression == "NOT (#n0 = :v0)"
        assert name_map == {"#n0": "active"}
        assert value_map == {":v0": True}

    def test_not_with_and_inner(self):
        conjunction = {
            "type": "and",
            "conditions": [cond("x", "gt", 0), cond("y", "lt", 10)],
        }
        expression, _, _ = translate({"type": "not", "condition": conjunction})
        assert expression == "NOT ((#n0 > :v0) AND (#n1 < :v1))"
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
# ---------------------------------------------------------------------------
|
|
249
|
+
# Error cases
|
|
250
|
+
# ---------------------------------------------------------------------------
|
|
251
|
+
|
|
252
|
+
class TestErrors:
    """Unsupported operators and node types fail fast with ValueError."""

    def test_unknown_op_raises_value_error(self):
        bad = cond("x", "regex", ".*")
        with pytest.raises(ValueError, match="Unknown filter operator"):
            translate(bad)

    def test_unknown_node_type_raises_value_error(self):
        bad = {"type": "xor", "conditions": []}
        with pytest.raises(ValueError, match="Unknown filter AST node type"):
            translate(bad)
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
# ---------------------------------------------------------------------------
|
|
263
|
+
# Global counter: no alias collisions in compound filters
|
|
264
|
+
# ---------------------------------------------------------------------------
|
|
265
|
+
|
|
266
|
+
class TestGlobalCounters:
    """Alias counters are global across the whole AST — no #nX/:vX collisions."""

    def test_distinct_field_and_value_aliases(self):
        """A compound filter over three distinct fields must use #n0/#n1/#n2
        and :v0/:v1/:v2 with no collisions."""
        ast = {
            "type": "and",
            "conditions": [
                cond("alpha", "eq", 1),
                cond("beta", "gt", 2),
                cond("gamma", "lte", 3),
            ],
        }
        expression, name_map, value_map = translate(ast)
        # Full-dict comparisons pin both the alias sets and their targets.
        assert name_map == {"#n0": "alpha", "#n1": "beta", "#n2": "gamma"}
        assert value_map == {":v0": 1, ":v1": 2, ":v2": 3}
        assert expression == "(#n0 = :v0) AND (#n1 > :v1) AND (#n2 <= :v2)"

    def test_in_then_eq_no_collision(self):
        """in_ expands multiple :vX aliases; a subsequent field must continue
        from the correct counter, not restart at :v0."""
        ast = {
            "type": "and",
            "conditions": [
                cond("status", "in", ["a", "b"]),
                cond("age", "gt", 18),
            ],
        }
        expression, name_map, value_map = translate(ast)
        # status → #n0, age → #n1
        assert (name_map["#n0"], name_map["#n1"]) == ("status", "age")
        # :v0 and :v1 are consumed by the in-expansion; age continues at :v2.
        assert (value_map[":v0"], value_map[":v1"]) == ("a", "b")
        assert value_map[":v2"] == 18
        assert "(#n0 = :v0 OR #n0 = :v1)" in expression
        assert "#n1 > :v2" in expression
|