alt-python-pynosqlc-dynamodb 1.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,88 @@
1
+ Metadata-Version: 2.4
2
+ Name: alt-python-pynosqlc-dynamodb
3
+ Version: 1.0.4
4
+ Summary: DynamoDB driver for pynosqlc
5
+ Project-URL: Homepage, https://github.com/alt-python/pynosqlc
6
+ Project-URL: Repository, https://github.com/alt-python/pynosqlc
7
+ Project-URL: Documentation, https://github.com/alt-python/pynosqlc#getting-started
8
+ Project-URL: Bug Tracker, https://github.com/alt-python/pynosqlc/issues
9
+ Author: Craig Parravicini, Claude (Anthropic)
10
+ License: MIT
11
+ Keywords: async,aws,database,driver,dynamodb,nosql
12
+ Classifier: Development Status :: 5 - Production/Stable
13
+ Classifier: Framework :: AsyncIO
14
+ Classifier: Intended Audience :: Developers
15
+ Classifier: License :: OSI Approved :: MIT License
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Topic :: Database
19
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
20
+ Requires-Python: >=3.12
21
+ Requires-Dist: aioboto3>=2.7
22
+ Requires-Dist: alt-python-pynosqlc-core
23
+ Description-Content-Type: text/markdown
24
+
25
+ # pynosqlc-dynamodb
26
+
27
+ DynamoDB driver for [pynosqlc](https://github.com/alt-python/pynosqlc) — connects to Amazon DynamoDB (or DynamoDB Local) via aioboto3.
28
+
29
+ ## Install
30
+
31
+ ```
32
+ pip install alt-python-pynosqlc-dynamodb
33
+ ```
34
+
35
+ ## Requirements
36
+
37
+ - Python 3.12+
38
+ - aioboto3 2.7+
39
+ - AWS credentials configured (`~/.aws/credentials`, environment variables, or IAM role)
40
+ - For local development: [DynamoDB Local](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.html)
41
+
42
+ ## Usage
43
+
44
+ ### AWS (production)
45
+
46
+ ```python
47
+ import asyncio
48
+ from pynosqlc.core import DriverManager, Filter
49
+ import pynosqlc.dynamodb # auto-registers DynamoDriver
50
+
51
+ async def main():
52
+ async with await DriverManager.get_client('pynosqlc:dynamodb:us-east-1') as client:
53
+ col = client.get_collection('orders')
54
+ await col.store('o1', {'item': 'widget', 'qty': 5})
55
+ f = Filter.where('qty').gt(0).build()
56
+ async for doc in await col.find(f):
57
+ print(doc)
58
+
59
+ asyncio.run(main())
60
+ ```
61
+
62
+ ### DynamoDB Local
63
+
64
+ Pass `endpoint` in the properties dict to point at a local instance:
65
+
66
+ ```python
67
+ async with await DriverManager.get_client(
68
+ 'pynosqlc:dynamodb:us-east-1',
69
+ properties={'endpoint': 'http://localhost:8000'},
70
+ ) as client:
71
+ ...
72
+ ```
73
+
74
+ ## URL scheme
75
+
76
+ ```
77
+ pynosqlc:dynamodb:<aws-region>
78
+ ```
79
+
80
+ Example: `pynosqlc:dynamodb:us-east-1`
81
+
82
+ Optional properties:
83
+
84
+ | Key | Description |
85
+ |---|---|
86
+ | `endpoint` | Override endpoint URL (e.g. `http://localhost:8000` for DynamoDB Local) |
87
+ | `aws_access_key_id` | AWS access key (falls back to environment / credential chain) |
88
+ | `aws_secret_access_key` | AWS secret key (falls back to environment / credential chain) |
@@ -0,0 +1,8 @@
1
+ pynosqlc/dynamodb/__init__.py,sha256=g8b_pg2wDOiydXSGfakMTbekgIKi701pydhA0UdsxT0,631
2
+ pynosqlc/dynamodb/dynamo_client.py,sha256=DiSa53aEryKR58jVrqZvx3J8k0x0R7SSVn3j0xLOjlE,3803
3
+ pynosqlc/dynamodb/dynamo_collection.py,sha256=FdDZn3SvasvMQKFSurzgEi5EywJCITOKiqMardLP_KM,4901
4
+ pynosqlc/dynamodb/dynamo_driver.py,sha256=mlIKsiKg_TWUp3_Dc65js7YNr_bU4w-QQN25f7wlsMw,2731
5
+ pynosqlc/dynamodb/dynamo_filter_translator.py,sha256=n4pDOPnhDnIFYhJx--fPdRJnsAHkm_kzf6IDkl2GRH0,7043
6
+ alt_python_pynosqlc_dynamodb-1.0.4.dist-info/METADATA,sha256=FjZ0z-4ybcg_BYdh9COEXcqOoTpu9BexKuvKN5OAutE,2695
7
+ alt_python_pynosqlc_dynamodb-1.0.4.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
8
+ alt_python_pynosqlc_dynamodb-1.0.4.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.29.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,20 @@
1
"""DynamoDB driver for pynosqlc.

Importing this package auto-registers the DynamoDriver with DriverManager,
so ``import pynosqlc.dynamodb`` is all that is needed before resolving
``pynosqlc:dynamodb:<region>`` URLs through ``DriverManager.get_client``.
"""

from __future__ import annotations

from pynosqlc.dynamodb.dynamo_filter_translator import DynamoFilterTranslator
from pynosqlc.dynamodb.dynamo_client import DynamoClient
from pynosqlc.dynamodb.dynamo_collection import DynamoCollection

# Import last — triggers the module-level DriverManager.register_driver()
# call at the bottom of dynamo_driver.py.
from pynosqlc.dynamodb.dynamo_driver import DynamoDriver  # noqa: F401 (side-effect import)

__all__ = [
    "DynamoDriver",
    "DynamoClient",
    "DynamoCollection",
    "DynamoFilterTranslator",
]
@@ -0,0 +1,111 @@
1
+ """
2
+ dynamo_client.py — DynamoClient: a pynosqlc Client backed by aioboto3 DynamoDB resource.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ import botocore.exceptions
8
+
9
+ from pynosqlc.core.client import Client
10
+ from pynosqlc.dynamodb.dynamo_collection import DynamoCollection
11
+
12
+
13
class DynamoClient(Client):
    """A pynosqlc Client backed by an aioboto3 DynamoDB resource.

    Args:
        url: the original ``pynosqlc:dynamodb:<region>`` URL.
        session: an :class:`aioboto3.Session` instance.
        region: the AWS region name.
        endpoint: optional endpoint URL override (e.g. for DynamoDB Local).
        properties: driver-specific properties dict.
    """

    def __init__(
        self,
        url: str,
        session,
        region: str,
        endpoint: str | None,
        properties: dict | None = None,
    ) -> None:
        super().__init__({"url": url})
        self._session = session
        self._region = region
        self._endpoint = endpoint
        self._properties = properties or {}
        # aioboto3 resource handle and the async context manager that owns it.
        self._resource = None
        self._resource_ctx = None
        # Table names already verified/created by ensure_table().
        self._table_cache: set[str] = set()

    async def _open(self) -> None:
        """Enter the aioboto3 DynamoDB resource context manager.

        Must be called by the driver after constructing this client.
        """
        ctx = self._session.resource(
            "dynamodb",
            region_name=self._region,
            endpoint_url=self._endpoint,
        )
        self._resource_ctx = ctx
        self._resource = await ctx.__aenter__()

    def _get_collection(self, name: str) -> DynamoCollection:
        """Create and return a :class:`DynamoCollection` for *name*."""
        return DynamoCollection(self, name)

    async def _close(self) -> None:
        """Exit the aioboto3 DynamoDB resource context manager (no-op if never opened)."""
        ctx = self._resource_ctx
        if ctx is None:
            return
        await ctx.__aexit__(None, None, None)
        self._resource_ctx = None
        self._resource = None

    async def ensure_table(self, name: str) -> None:
        """Ensure the DynamoDB table *name* exists, creating it if necessary.

        Uses ``_pk`` (String) as the partition key and ``PAY_PER_REQUEST``
        billing so no capacity planning is required.

        Idempotent: once a table has been verified (or created) it is cached
        and later calls return immediately.

        Args:
            name: the DynamoDB table name.
        """
        if name in self._table_cache:
            return

        table = await self._resource.Table(name)
        try:
            await table.load()
        except botocore.exceptions.ClientError as exc:
            # Anything other than "table is missing" is a real error.
            if exc.response["Error"]["Code"] != "ResourceNotFoundException":
                raise
        else:
            # Table already exists — remember it and stop here.
            self._table_cache.add(name)
            return

        # Table does not exist — create it.
        try:
            await self._resource.create_table(
                TableName=name,
                KeySchema=[{"AttributeName": "_pk", "KeyType": "HASH"}],
                AttributeDefinitions=[
                    {"AttributeName": "_pk", "AttributeType": "S"}
                ],
                BillingMode="PAY_PER_REQUEST",
            )
        except botocore.exceptions.ClientError as exc:
            if exc.response["Error"]["Code"] not in (
                "ResourceInUseException",
                "TableAlreadyExistsException",
            ):
                raise
            # Race: another coroutine already created the table — that's fine.

        # Block until the table is visible before first use.
        waiter = await self._resource.Table(name)
        await waiter.wait_until_exists()
        self._table_cache.add(name)
@@ -0,0 +1,133 @@
1
+ """
2
+ dynamo_collection.py — DynamoCollection: a pynosqlc Collection backed by a DynamoDB table.
3
+
4
+ The primary key attribute is ``_pk`` (String). All other document fields are
5
+ stored as top-level DynamoDB item attributes.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import uuid
11
+
12
+ from pynosqlc.core.collection import Collection
13
+ from pynosqlc.core.cursor import Cursor
14
+ from pynosqlc.dynamodb.dynamo_filter_translator import DynamoFilterTranslator
15
+
16
+
17
class DynamoCollection(Collection):
    """A pynosqlc Collection backed by a DynamoDB table.

    The DynamoDB table is created on first access with ``_pk`` as the
    partition key. :meth:`DynamoClient.ensure_table` runs at the start of
    every operation — it is a no-op after the first successful call.

    Args:
        client: the owning :class:`~pynosqlc.dynamodb.DynamoClient`.
        name: the table / collection name.
    """

    def __init__(self, client, name: str) -> None:
        super().__init__(client, name)

    async def _table(self):
        """Ensure the backing table exists and return its aioboto3 handle."""
        await self._client.ensure_table(self._name)
        return await self._client._resource.Table(self._name)

    @staticmethod
    def _strip_pk(item: dict) -> dict:
        """Return *item* without the internal ``_pk`` attribute."""
        return {field: value for field, value in item.items() if field != "_pk"}

    async def _get(self, key: str) -> dict | None:
        """Retrieve a document by its ``_pk``; ``None`` when absent."""
        table = await self._table()
        item = (await table.get_item(Key={"_pk": key})).get("Item")
        return None if item is None else self._strip_pk(item)

    async def _store(self, key: str, doc: dict) -> None:
        """Upsert a document, setting ``_pk = key``."""
        table = await self._table()
        await table.put_item(Item={**doc, "_pk": key})

    async def _delete(self, key: str) -> None:
        """Delete the document at ``_pk = key``."""
        table = await self._table()
        await table.delete_item(Key={"_pk": key})

    async def _insert(self, doc: dict) -> str:
        """Insert a document under a generated UUID ``_pk``; return the id."""
        table = await self._table()
        new_id = str(uuid.uuid4())
        await table.put_item(Item={**doc, "_pk": new_id})
        return new_id

    async def _update(self, key: str, patch: dict) -> None:
        """Patch the document at ``_pk = key`` using a SET expression.

        Only provided fields are updated; others are preserved.
        ``_pk`` is never patched even if present in *patch*.
        """
        table = await self._table()

        assignments: list[str] = []
        expr_names: dict[str, str] = {}
        expr_values: dict[str, object] = {}
        idx = 0
        for field, value in patch.items():
            if field == "_pk":
                # Never rewrite the partition key.
                continue
            expr_names[f"#attr{idx}"] = field
            expr_values[f":val{idx}"] = value
            assignments.append(f"#attr{idx} = :val{idx}")
            idx += 1

        if not assignments:
            return

        await table.update_item(
            Key={"_pk": key},
            UpdateExpression="SET " + ", ".join(assignments),
            ExpressionAttributeNames=expr_names,
            ExpressionAttributeValues=expr_values,
        )

    async def _find(self, ast: dict) -> Cursor:
        """Find documents matching the filter AST.

        Translates the AST to a DynamoDB FilterExpression, performs a full
        table scan with automatic pagination, strips ``_pk`` from each item,
        and wraps results in a :class:`~pynosqlc.core.Cursor`.
        """
        table = await self._table()

        expr, names, values = DynamoFilterTranslator.translate(ast)
        scan_kwargs: dict = {}
        if expr is not None:
            scan_kwargs["FilterExpression"] = expr
            scan_kwargs["ExpressionAttributeNames"] = names
            if values:
                scan_kwargs["ExpressionAttributeValues"] = values

        # Paginated scan — DynamoDB returns at most 1 MB per page.
        items: list[dict] = []
        page = await table.scan(**scan_kwargs)
        while True:
            items.extend(page.get("Items", []))
            if "LastEvaluatedKey" not in page:
                break
            page = await table.scan(
                ExclusiveStartKey=page["LastEvaluatedKey"], **scan_kwargs
            )

        return Cursor([self._strip_pk(item) for item in items])
@@ -0,0 +1,86 @@
1
+ """
2
+ dynamo_driver.py — DynamoDriver: connects to DynamoDB via aioboto3.
3
+
4
+ URL scheme: pynosqlc:dynamodb:<region>
5
+ e.g. pynosqlc:dynamodb:us-east-1
6
+
7
+ Auto-registers with DriverManager on import.
8
+ """
9
+
10
+ from __future__ import annotations
11
+
12
+ import os
13
+
14
+ import aioboto3
15
+
16
+ from pynosqlc.core.driver import Driver
17
+ from pynosqlc.core.driver_manager import DriverManager
18
+ from pynosqlc.dynamodb.dynamo_client import DynamoClient
19
+
20
+
21
class DynamoDriver(Driver):
    """Driver that creates :class:`DynamoClient` instances.

    URL prefix: ``pynosqlc:dynamodb:``
    """

    URL_PREFIX: str = "pynosqlc:dynamodb:"

    def accepts_url(self, url: str) -> bool:
        """Return ``True`` for ``'pynosqlc:dynamodb:'`` URLs."""
        return isinstance(url, str) and url.startswith(self.URL_PREFIX)

    async def connect(
        self,
        url: str,
        properties: dict | None = None,
    ) -> DynamoClient:
        """Create and return an open :class:`DynamoClient`.

        Args:
            url: ``pynosqlc:dynamodb:<region>``
            properties: optional dict; supports:
                - ``endpoint``: override endpoint URL (e.g. for DynamoDB Local)
                - ``aws_access_key_id``: AWS access key (optional)
                - ``aws_secret_access_key``: AWS secret key (optional)

        Returns:
            An open :class:`DynamoClient`.
        """
        props = properties or {}

        region = url[len(self.URL_PREFIX):]
        if not region:
            region = "us-east-1"

        endpoint = props.get("endpoint") or os.environ.get("DYNAMODB_ENDPOINT")

        session_kwargs: dict = {}
        access_key = props.get("aws_access_key_id")
        secret_key = props.get("aws_secret_access_key")
        if access_key and secret_key:
            # Bug fix: explicit credentials passed via properties are always
            # honoured. Previously they were only read when an endpoint
            # override was present AND no environment credentials existed, so
            # in the normal AWS path they were silently ignored — contradicting
            # the documented "optional properties" contract.
            session_kwargs["aws_access_key_id"] = access_key
            session_kwargs["aws_secret_access_key"] = secret_key
        elif endpoint:
            # When using a local endpoint (DynamoDB Local / LocalStack) without
            # real credentials, supply dummy values so boto3 does not raise a
            # NoCredentialsError.
            has_real_creds = (
                os.environ.get("AWS_ACCESS_KEY_ID")
                or os.environ.get("AWS_PROFILE")
                or os.environ.get("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI")
                or os.environ.get("AWS_WEB_IDENTITY_TOKEN_FILE")
            )
            if not has_real_creds:
                session_kwargs["aws_access_key_id"] = props.get(
                    "aws_access_key_id", "dummy"
                )
                session_kwargs["aws_secret_access_key"] = props.get(
                    "aws_secret_access_key", "dummy"
                )

        session = aioboto3.Session(**session_kwargs)

        client = DynamoClient(url, session, region, endpoint, props)
        await client._open()
        return client
82
+
83
+
84
# Auto-register on import — a single shared instance is sufficient. This is
# the module-level side effect that pynosqlc/dynamodb/__init__.py triggers by
# importing this module last: after the import, DriverManager can resolve
# 'pynosqlc:dynamodb:' URLs.
_driver = DynamoDriver()
DriverManager.register_driver(_driver)
@@ -0,0 +1,210 @@
1
+ """
2
+ dynamo_filter_translator.py — Translates a pynosqlc Filter AST to a DynamoDB
3
+ FilterExpression triple.
4
+
5
+ Returns
6
+ -------
7
+ tuple[str | None, dict, dict]
8
+ (filter_expression, expression_attribute_names, expression_attribute_values)
9
+
10
+ When the AST is None or empty (no conditions), returns ``(None, {}, {})``.
11
+
12
+ Design
13
+ ------
14
+ A fresh ``_TranslatorState`` is created for each ``translate()`` call. The
15
+ state carries monotonically-increasing counters for field-name aliases
16
+ (``#n0``, ``#n1``, …) and value aliases (``:v0``, ``:v1``, …) so that
17
+ compound filters across multiple fields never produce collisions.
18
+
19
+ All field references go through ``ExpressionAttributeNames`` to avoid
20
+ DynamoDB reserved-word conflicts.
21
+
22
+ Supported operators
23
+ -------------------
24
+ eq, ne, gt, gte, lt, lte, contains, in, nin, exists
25
+
26
+ Composite node types
27
+ --------------------
28
+ and, or, not
29
+ """
30
+
31
+ from __future__ import annotations
32
+
33
+ from typing import Any
34
+
35
+
36
class DynamoFilterTranslator:
    """Stateless translator from pynosqlc Filter AST to DynamoDB expression triple."""

    @staticmethod
    def translate(
        ast: dict | None,
    ) -> tuple[str | None, dict, dict]:
        """Translate a Filter AST node to a DynamoDB expression triple.

        Args:
            ast: a Filter AST node, or ``None`` / empty dict (matches all).

        Returns:
            A tuple of ``(filter_expression, expression_attribute_names,
            expression_attribute_values)``. When the AST is falsy or has no
            conditions, returns ``(None, {}, {})``.

        Raises:
            ValueError: if an unknown AST node type or operator is encountered.
        """
        match_all = (None, {}, {})

        if not ast:
            return match_all

        # A top-level 'and'/'or' with no conditions also means "match all".
        is_composite = ast.get("type") in ("and", "or")
        if is_composite and not ast.get("conditions"):
            return match_all

        state = _TranslatorState()
        expression = state._node(ast)
        if expression is None:
            return match_all
        return (expression, state.attr_names, state.attr_values)
68
+
69
+
70
+ # ---------------------------------------------------------------------------
71
+ # Internal stateful translator
72
+ # ---------------------------------------------------------------------------
73
+
74
+ class _TranslatorState:
75
+ """Carries mutable translation state for a single translate() call."""
76
+
77
+ def __init__(self) -> None:
78
+ self.name_idx: int = 0
79
+ self.value_idx: int = 0
80
+ self.attr_names: dict[str, str] = {}
81
+ self.attr_values: dict[str, Any] = {}
82
+
83
+ # ── Counter helpers ──────────────────────────────────────────────────────
84
+
85
+ def _field_alias(self, field: str) -> str:
86
+ """Allocate the next #nX alias for *field* and record the mapping."""
87
+ alias = f"#n{self.name_idx}"
88
+ self.attr_names[alias] = field
89
+ self.name_idx += 1
90
+ return alias
91
+
92
+ def _value_alias(self, value: Any) -> str:
93
+ """Allocate the next :vX alias for *value* and record the mapping."""
94
+ alias = f":v{self.value_idx}"
95
+ self.attr_values[alias] = value
96
+ self.value_idx += 1
97
+ return alias
98
+
99
+ # ── Node dispatcher ──────────────────────────────────────────────────────
100
+
101
+ def _node(self, ast: dict) -> str | None:
102
+ """Recursively translate an AST node to an expression string."""
103
+ node_type = ast.get("type")
104
+
105
+ if node_type == "and":
106
+ return self._and_node(ast)
107
+
108
+ if node_type == "or":
109
+ return self._or_node(ast)
110
+
111
+ if node_type == "not":
112
+ return self._not_node(ast)
113
+
114
+ if node_type == "condition":
115
+ return self._condition(ast)
116
+
117
+ raise ValueError(f"Unknown filter AST node type: {node_type!r}")
118
+
119
+ # ── Composite nodes ──────────────────────────────────────────────────────
120
+
121
+ def _and_node(self, ast: dict) -> str | None:
122
+ conditions = ast.get("conditions") or []
123
+ if not conditions:
124
+ return None
125
+ parts = [self._node(c) for c in conditions]
126
+ parts = [p for p in parts if p is not None]
127
+ if not parts:
128
+ return None
129
+ if len(parts) == 1:
130
+ return parts[0]
131
+ joined = " AND ".join(f"({p})" for p in parts)
132
+ return joined
133
+
134
+ def _or_node(self, ast: dict) -> str | None:
135
+ conditions = ast.get("conditions") or []
136
+ if not conditions:
137
+ return None
138
+ parts = [self._node(c) for c in conditions]
139
+ parts = [p for p in parts if p is not None]
140
+ if not parts:
141
+ return None
142
+ if len(parts) == 1:
143
+ return parts[0]
144
+ joined = " OR ".join(f"({p})" for p in parts)
145
+ return joined
146
+
147
+ def _not_node(self, ast: dict) -> str:
148
+ inner = self._node(ast["condition"])
149
+ return f"NOT ({inner})"
150
+
151
+ # ── Leaf condition ───────────────────────────────────────────────────────
152
+
153
+ def _condition(self, node: dict) -> str:
154
+ field: str = node["field"]
155
+ op: str = node["op"]
156
+ value: Any = node.get("value")
157
+
158
+ na = self._field_alias(field)
159
+
160
+ if op == "eq":
161
+ va = self._value_alias(value)
162
+ return f"{na} = {va}"
163
+
164
+ if op == "ne":
165
+ va = self._value_alias(value)
166
+ return f"{na} <> {va}"
167
+
168
+ if op == "gt":
169
+ va = self._value_alias(value)
170
+ return f"{na} > {va}"
171
+
172
+ if op == "gte":
173
+ va = self._value_alias(value)
174
+ return f"{na} >= {va}"
175
+
176
+ if op == "lt":
177
+ va = self._value_alias(value)
178
+ return f"{na} < {va}"
179
+
180
+ if op == "lte":
181
+ va = self._value_alias(value)
182
+ return f"{na} <= {va}"
183
+
184
+ if op == "contains":
185
+ va = self._value_alias(value)
186
+ return f"contains({na}, {va})"
187
+
188
+ if op == "exists":
189
+ if value:
190
+ return f"attribute_exists({na})"
191
+ else:
192
+ return f"attribute_not_exists({na})"
193
+
194
+ if op == "in":
195
+ # One OR clause per value; the field alias is shared
196
+ clauses = []
197
+ for v in value:
198
+ va = self._value_alias(v)
199
+ clauses.append(f"{na} = {va}")
200
+ return "(" + " OR ".join(clauses) + ")"
201
+
202
+ if op == "nin":
203
+ # One AND clause per value; the field alias is shared
204
+ clauses = []
205
+ for v in value:
206
+ va = self._value_alias(v)
207
+ clauses.append(f"{na} <> {va}")
208
+ return "(" + " AND ".join(clauses) + ")"
209
+
210
+ raise ValueError(f"Unknown filter operator: {op!r}")