hammad-python 0.0.18__py3-none-any.whl → 0.0.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hammad/__init__.py +7 -137
- hammad/_internal.py +1 -0
- hammad/cli/_runner.py +8 -8
- hammad/cli/plugins.py +55 -26
- hammad/cli/styles/utils.py +16 -8
- hammad/data/__init__.py +1 -5
- hammad/data/collections/__init__.py +2 -3
- hammad/data/collections/collection.py +41 -22
- hammad/data/collections/indexes/__init__.py +1 -1
- hammad/data/collections/indexes/qdrant/__init__.py +1 -1
- hammad/data/collections/indexes/qdrant/index.py +106 -118
- hammad/data/collections/indexes/qdrant/settings.py +14 -14
- hammad/data/collections/indexes/qdrant/utils.py +28 -38
- hammad/data/collections/indexes/tantivy/__init__.py +1 -1
- hammad/data/collections/indexes/tantivy/index.py +57 -59
- hammad/data/collections/indexes/tantivy/settings.py +8 -19
- hammad/data/collections/indexes/tantivy/utils.py +28 -52
- hammad/data/models/__init__.py +2 -7
- hammad/data/sql/__init__.py +1 -1
- hammad/data/sql/database.py +71 -73
- hammad/data/sql/types.py +37 -51
- hammad/formatting/__init__.py +2 -1
- hammad/formatting/json/converters.py +2 -2
- hammad/genai/__init__.py +96 -36
- hammad/genai/agents/__init__.py +47 -1
- hammad/genai/agents/agent.py +1022 -0
- hammad/genai/agents/run.py +615 -0
- hammad/genai/agents/types/__init__.py +29 -22
- hammad/genai/agents/types/agent_context.py +13 -0
- hammad/genai/agents/types/agent_event.py +128 -0
- hammad/genai/agents/types/agent_hooks.py +220 -0
- hammad/genai/agents/types/agent_messages.py +31 -0
- hammad/genai/agents/types/agent_response.py +90 -0
- hammad/genai/agents/types/agent_stream.py +242 -0
- hammad/genai/models/__init__.py +1 -0
- hammad/genai/models/embeddings/__init__.py +39 -0
- hammad/genai/{embedding_models/embedding_model.py → models/embeddings/model.py} +45 -41
- hammad/genai/{embedding_models → models/embeddings}/run.py +10 -8
- hammad/genai/models/embeddings/types/__init__.py +37 -0
- hammad/genai/{embedding_models → models/embeddings/types}/embedding_model_name.py +2 -4
- hammad/genai/{embedding_models → models/embeddings/types}/embedding_model_response.py +11 -4
- hammad/genai/{embedding_models/embedding_model_request.py → models/embeddings/types/embedding_model_run_params.py} +4 -3
- hammad/genai/models/embeddings/types/embedding_model_settings.py +47 -0
- hammad/genai/models/language/__init__.py +48 -0
- hammad/genai/{language_models/language_model.py → models/language/model.py} +481 -204
- hammad/genai/{language_models → models/language}/run.py +80 -57
- hammad/genai/models/language/types/__init__.py +40 -0
- hammad/genai/models/language/types/language_model_instructor_mode.py +47 -0
- hammad/genai/models/language/types/language_model_messages.py +28 -0
- hammad/genai/{language_models/_types.py → models/language/types/language_model_name.py} +3 -40
- hammad/genai/{language_models → models/language/types}/language_model_request.py +17 -25
- hammad/genai/{language_models → models/language/types}/language_model_response.py +61 -68
- hammad/genai/{language_models → models/language/types}/language_model_response_chunk.py +8 -5
- hammad/genai/models/language/types/language_model_settings.py +89 -0
- hammad/genai/{language_models/_streaming.py → models/language/types/language_model_stream.py} +221 -243
- hammad/genai/{language_models/_utils → models/language/utils}/__init__.py +8 -11
- hammad/genai/models/language/utils/requests.py +421 -0
- hammad/genai/{language_models/_utils/_structured_outputs.py → models/language/utils/structured_outputs.py} +31 -20
- hammad/genai/models/model_provider.py +4 -0
- hammad/genai/{multimodal_models.py → models/multimodal.py} +4 -5
- hammad/genai/models/reranking.py +26 -0
- hammad/genai/types/__init__.py +1 -0
- hammad/genai/types/base.py +215 -0
- hammad/genai/{agents/types → types}/history.py +101 -88
- hammad/genai/{agents/types/tool.py → types/tools.py} +156 -141
- hammad/logging/logger.py +2 -1
- hammad/mcp/client/__init__.py +2 -3
- hammad/mcp/client/client.py +10 -10
- hammad/mcp/servers/__init__.py +2 -1
- hammad/service/decorators.py +1 -3
- hammad/web/models.py +1 -3
- hammad/web/search/client.py +10 -22
- {hammad_python-0.0.18.dist-info → hammad_python-0.0.20.dist-info}/METADATA +10 -2
- hammad_python-0.0.20.dist-info/RECORD +127 -0
- hammad/genai/embedding_models/__init__.py +0 -41
- hammad/genai/language_models/__init__.py +0 -35
- hammad/genai/language_models/_utils/_completions.py +0 -131
- hammad/genai/language_models/_utils/_messages.py +0 -89
- hammad/genai/language_models/_utils/_requests.py +0 -202
- hammad/genai/rerank_models.py +0 -26
- hammad_python-0.0.18.dist-info/RECORD +0 -111
- {hammad_python-0.0.18.dist-info → hammad_python-0.0.20.dist-info}/WHEEL +0 -0
- {hammad_python-0.0.18.dist-info → hammad_python-0.0.20.dist-info}/licenses/LICENSE +0 -0
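The rename entries above show the `hammad.genai` package being consolidated: `embedding_models`, `language_models`, `multimodal_models.py`, and `rerank_models.py` all move under a single `hammad.genai.models` namespace, and a new agents implementation (`agents/agent.py`, `agents/run.py`, and the `agents/types/` modules) is added. A rough sketch of what that means for import paths, inferred only from the renamed file paths listed above — nothing beyond the module paths themselves is confirmed by this diff:

```python
from importlib import import_module

# Module paths taken directly from the rename entries above; any attribute
# names inside these modules would be assumptions and are not shown here.
old_to_new = {
    "hammad.genai.embedding_models.embedding_model": "hammad.genai.models.embeddings.model",
    "hammad.genai.language_models.language_model": "hammad.genai.models.language.model",
    "hammad.genai.multimodal_models": "hammad.genai.models.multimodal",
    "hammad.genai.rerank_models": "hammad.genai.models.reranking",
}

# With hammad-python 0.0.20 installed, the new paths should import;
# the old ones were removed in this release.
for old_path, new_path in old_to_new.items():
    import_module(new_path)
```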
hammad/data/collections/indexes/tantivy/utils.py CHANGED
@@ -1,25 +1,15 @@
 """hammad.data.collections.indexes.tantivy.utils"""
 
-from dataclasses import (
-    dataclass,
-    is_dataclass,
-    asdict
-)
+from dataclasses import dataclass, is_dataclass, asdict
 from msgspec import json
-from typing import (
-    Any,
-    Dict,
-    List,
-    Optional,
-    final
-)
+from typing import Any, Dict, List, Optional, final
 
 import tantivy
 
 from .....cache import cached
 from .settings import (
     TantivyCollectionIndexSettings,
-    TantivyCollectionIndexQuerySettings
+    TantivyCollectionIndexQuerySettings,
 )
 
 
@@ -37,20 +27,20 @@ class TantivyCollectionIndexError(Exception):
 class TantivyIndexWrapper:
     """Wrapper over the `tantivy` index object."""
 
-    index
+    index: tantivy.Index
     """The `tantivy` index object."""
 
-    schema
+    schema: tantivy.Schema
     """The `tantivy` schema object."""
 
-    index_writer
+    index_writer: Any
     """The `tantivy` index writer object."""
 
 
 @cached
 def match_filters_for_query(
-    stored_filters
-    query_filters
+    stored_filters: Dict[str, Any] | None = None,
+    query_filters: Dict[str, Any] | None = None,
 ) -> bool:
     """Checks if stored filters match query filters."""
     if query_filters is None:
@@ -61,9 +51,7 @@ def match_filters_for_query(
 
 
 @cached
-def serialize(
-    obj : Any
-) -> Any:
+def serialize(obj: Any) -> Any:
     """Serializes an object to JSON."""
     try:
         return json.decode(json.encode(obj))
@@ -85,7 +73,7 @@ def serialize(
 
 @cached
 def build_tantivy_index_from_settings(
-    settings
+    settings: TantivyCollectionIndexSettings,
 ) -> TantivyIndexWrapper:
     """Builds a new `tantivy` index from the given settings."""
     # Init schema for index
@@ -93,48 +81,41 @@ def build_tantivy_index_from_settings(
 
     # Add fields
     # ID (stored and indexed)
-    schema_builder.add_text_field(
-        "id",
-        **settings.get_tantivy_config()["text_fields"]
-    )
+    schema_builder.add_text_field("id", **settings.get_tantivy_config()["text_fields"])
     # Content (stored and indexed) Contains entry content
     schema_builder.add_text_field(
         "content",
         **{
             **settings.get_tantivy_config()["text_fields"],
-            "tokenizer_name"
-            "index_option"
-        }
+            "tokenizer_name": "default",
+            "index_option": "position",
+        },
     )
     # Title (stored and indexed) Contains entry title
     schema_builder.add_text_field(
         "title",
         **{
             **settings.get_tantivy_config()["text_fields"],
-            "tokenizer_name"
-            "index_option"
-        }
+            "tokenizer_name": "default",
+            "index_option": "position",
+        },
     )
     # JSON (stored) Contains actual entry data
     schema_builder.add_json_field(
-        "data",
-        **settings.get_tantivy_config()["json_fields"]
+        "data", **settings.get_tantivy_config()["json_fields"]
     )
 
     # Timestamps
     schema_builder.add_date_field(
-        "created_at",
-        **settings.get_tantivy_config()["date_fields"]
+        "created_at", **settings.get_tantivy_config()["date_fields"]
     )
     schema_builder.add_date_field(
-        "expires_at",
-        **settings.get_tantivy_config()["date_fields"]
+        "expires_at", **settings.get_tantivy_config()["date_fields"]
     )
 
     # Sorting / Scoring
     schema_builder.add_integer_field(
-        "score",
-        **settings.get_tantivy_config()["numeric_fields"]
+        "score", **settings.get_tantivy_config()["numeric_fields"]
    )
 
     # Facet for Optional filters
@@ -151,7 +132,9 @@ def build_tantivy_index_from_settings(
     if "writer_heap_size" in settings.get_tantivy_config():
         writer_config["heap_size"] = settings.get_tantivy_config()["writer_heap_size"]
     if "writer_num_threads" in settings.get_tantivy_config():
-        writer_config["num_threads"] = settings.get_tantivy_config()["writer_num_threads"]
+        writer_config["num_threads"] = settings.get_tantivy_config()[
+            "writer_num_threads"
+        ]
 
     index_writer = index.writer(**writer_config)
 
@@ -160,16 +143,9 @@ def build_tantivy_index_from_settings(
     if reader_config:
         reload_policy = reader_config.get("reload_policy", "commit")
         num_warmers = reader_config.get("num_warmers", 0)
-        index.config_reader(
-            reload_policy=reload_policy,
-            num_warmers=num_warmers
-        )
-
-    return TantivyIndexWrapper(
-        schema=schema,
-        index=index,
-        index_writer=index_writer
-    )
+        index.config_reader(reload_policy=reload_policy, num_warmers=num_warmers)
+
+    return TantivyIndexWrapper(schema=schema, index=index, index_writer=index_writer)
 
 
 @cached
@@ -197,4 +173,4 @@ def extract_content_for_indexing(value: Any) -> str:
             content_parts.append(str(item))
         return " ".join(content_parts)
     else:
-        return str(value)
+        return str(value)
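For orientation, the `build_tantivy_index_from_settings` hunks above follow the standard tantivy-py schema-builder pattern. A minimal, self-contained sketch of the same construction with the settings plumbing stripped out — the field options here are illustrative defaults, not the package's exact configuration, which is pulled from `TantivyCollectionIndexSettings.get_tantivy_config()`:

```python
import tantivy

# Build a schema roughly mirroring the fields added in the hunk above.
schema_builder = tantivy.SchemaBuilder()
schema_builder.add_text_field("id", stored=True)
schema_builder.add_text_field(
    "content", stored=True, tokenizer_name="default", index_option="position"
)
schema_builder.add_text_field(
    "title", stored=True, tokenizer_name="default", index_option="position"
)
schema_builder.add_json_field("data", stored=True)
schema_builder.add_date_field("created_at", stored=True)
schema_builder.add_date_field("expires_at", stored=True)
schema_builder.add_integer_field("score", stored=True, indexed=True, fast=True)
schema = schema_builder.build()

index = tantivy.Index(schema)  # in-memory index; pass a path to persist
index_writer = index.writer()  # heap_size / num_threads are optional kwargs
index.config_reader(reload_policy="commit", num_warmers=0)
```

The real code additionally adds a facet field for optional filters and takes every keyword argument from its settings object rather than hard-coding them.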
hammad/data/models/__init__.py CHANGED
@@ -12,9 +12,7 @@ if TYPE_CHECKING:
         Model,
         model_settings,
     )
-    from .fields import (
-        field
-    )
+    from .fields import field
     from .utils import (
         validator,
         is_field,
@@ -23,7 +21,7 @@ if TYPE_CHECKING:
     from .extensions.pydantic.converters import (
         convert_to_pydantic_model,
         convert_to_pydantic_field,
-        is_pydantic_model_class
+        is_pydantic_model_class,
     )
 
 
@@ -31,16 +29,13 @@ __all__ = (
     # hammad.lib.data.models.model
     "Model",
     "model_settings",
-
     # hammad.lib.data.models.fields
     "field",
-
     # hammad.lib.data.models.utils
     "validator",
     "is_field",
     "is_model",
     "model_settings",
-
     # hammad.lib.data.models.extensions.pydantic.converters
     "convert_to_pydantic_model",
     "convert_to_pydantic_field",
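The `__init__.py` hunks above sit inside an `if TYPE_CHECKING:` block that feeds the module's `__all__`, the usual lazy re-export layout. A generic sketch of that pattern for one symbol — the PEP 562 `__getattr__` hook is assumed for illustration, since this diff only shows the type-checking imports and `__all__`:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only evaluated by static type checkers; nothing is imported at runtime.
    from .fields import field

__all__ = ("field",)


def __getattr__(name: str):
    # Resolve the export lazily on first attribute access (PEP 562).
    if name == "field":
        from .fields import field

        return field
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```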
hammad/data/sql/__init__.py CHANGED
hammad/data/sql/database.py CHANGED
@@ -21,7 +21,7 @@ try:
         create_engine,
         Column,
         String,
-        Text,
+        Text,
         DateTime,
         Integer,
         MetaData,
@@ -36,6 +36,7 @@ try:
     )
     from sqlalchemy.orm import sessionmaker, Session
     from sqlalchemy.sql import Select
+
     SQLALCHEMY_AVAILABLE = True
 except ImportError:
     # SQLAlchemy not available
@@ -69,7 +70,7 @@ class Database(Generic[DatabaseItemType]):
     """
     A clean SQL-based database implementation using SQLAlchemy that provides
     the lowest-level storage backend for collections.
-
+
     Features:
     - Optional schema validation
     - Custom path format support (memory or file-based)
@@ -91,7 +92,7 @@ class Database(Generic[DatabaseItemType]):
     ) -> None:
         """
         Initialize a new Database instance.
-
+
         Args:
             name: The name of the database
             schema: Optional schema type for validation
@@ -168,7 +169,7 @@ class Database(Generic[DatabaseItemType]):
     def _serialize_item(self, item: DatabaseItemType) -> str:
         """Serialize an item to JSON string."""
         from dataclasses import is_dataclass, asdict
-
+
         if isinstance(item, (str, int, float, bool, type(None))):
             return json.dumps(item)
         elif isinstance(item, (list, dict)):
@@ -188,9 +189,7 @@ class Database(Generic[DatabaseItemType]):
         """Validate item against schema if one is set."""
         if self.schema is not None:
             if not isinstance(item, self.schema):
-                raise ValueError(
-                    f"Item is not of type {self.schema.__name__}"
-                )
+                raise ValueError(f"Item is not of type {self.schema.__name__}")
 
     def _build_query_conditions(
         self,
@@ -199,12 +198,12 @@ class Database(Generic[DatabaseItemType]):
     ) -> Any:
         """Build SQLAlchemy query conditions from QueryFilter."""
         conditions = []
-
+
         for condition in query_filter.conditions:
             column = getattr(table.c, condition.field, None)
             if column is None:
                 continue
-
+
             if condition.operator == "eq":
                 conditions.append(column == condition.value)
             elif condition.operator == "ne":
@@ -238,7 +237,7 @@ class Database(Generic[DatabaseItemType]):
 
         if not conditions:
             return None
-
+
         if query_filter.logic == "and":
             return and_(*conditions)
         else:  # or
@@ -248,27 +247,25 @@ class Database(Generic[DatabaseItemType]):
         """Remove expired items from the database."""
         if not self.auto_cleanup_expired:
             return 0
-
+
         now = datetime.now(timezone.utc)
-
+
         # Find expired items by checking created_at + ttl < now
         stmt = select(self._table).where(
             and_(
                 self._table.c.ttl.isnot(None),
-                self._table.c.created_at +
-                (self._table.c.ttl * timedelta(seconds=1)) < now,
+                self._table.c.created_at + (self._table.c.ttl * timedelta(seconds=1))
+                < now,
             )
         )
-
+
         expired_items = session.execute(stmt).fetchall()
         expired_ids = [item.id for item in expired_items]
-
+
         if expired_ids:
-            delete_stmt = delete(self._table).where(
-                self._table.c.id.in_(expired_ids)
-            )
+            delete_stmt = delete(self._table).where(self._table.c.id.in_(expired_ids))
             session.execute(delete_stmt)
-
+
         return len(expired_ids)
 
     def add(
@@ -281,31 +278,31 @@ class Database(Generic[DatabaseItemType]):
     ) -> str:
         """
         Add an item to the database.
-
+
         Args:
             item: The item to store
             id: Optional ID (will generate UUID if not provided)
             filters: Optional filters/metadata
             ttl: Optional TTL in seconds
-
+
         Returns:
             The ID of the stored item
         """
         self._validate_schema(item)
-
+
         item_id = id or str(uuid.uuid4())
         item_ttl = ttl or self.ttl
         now = datetime.now(timezone.utc)
-
+
         serialized_item = self._serialize_item(item)
         serialized_filters = json.dumps(filters or {})
-
+
         with self._session_factory() as session:
             # Check if item already exists
             existing = session.execute(
                 select(self._table).where(self._table.c.id == item_id)
             ).fetchone()
-
+
             if existing:
                 # Update existing item
                 stmt = (
@@ -329,14 +326,14 @@ class Database(Generic[DatabaseItemType]):
                     ttl=item_ttl,
                     table_name=self.table_name,
                 )
-
+
             session.execute(stmt)
-
+
             # Cleanup expired items
             self._cleanup_expired_items(session)
-
+
             session.commit()
-
+
         return item_id
 
     def get(
@@ -347,42 +344,40 @@ class Database(Generic[DatabaseItemType]):
     ) -> Optional[DatabaseItem[DatabaseItemType]]:
         """
         Get an item by ID.
-
+
         Args:
             id: The item ID
             filters: Optional filters to match
-
+
         Returns:
             The database item or None if not found
         """
         with self._session_factory() as session:
             stmt = select(self._table).where(self._table.c.id == id)
             result = session.execute(stmt).fetchone()
-
+
             if not result:
                 return None
-
+
             # Check if expired
             if result.ttl is not None:
                 expires_at = result.created_at + timedelta(seconds=result.ttl)
                 if datetime.now(timezone.utc) >= expires_at:
                     # Delete expired item
-                    session.execute(
-                        delete(self._table).where(self._table.c.id == id)
-                    )
+                    session.execute(delete(self._table).where(self._table.c.id == id))
                     session.commit()
                     return None
-
+
             # Check filters if provided
             if filters:
                 stored_filters = json.loads(result.filters or "{}")
                 if not all(stored_filters.get(k) == v for k, v in filters.items()):
                     return None
-
+
             # Deserialize and return
             item_data = self._deserialize_item(result.item_data)
             stored_filters = json.loads(result.filters or "{}")
-
+
             return DatabaseItem(
                 id=result.id,
                 item=item_data,
@@ -404,29 +399,29 @@ class Database(Generic[DatabaseItemType]):
     ) -> List[DatabaseItem[DatabaseItemType]]:
         """
         Query items from the database.
-
+
         Args:
             query_filter: Filter conditions to apply
             limit: Maximum number of results
             offset: Number of results to skip
             order_by: Field to order by
             ascending: Sort direction
-
+
         Returns:
             List of matching database items
         """
         with self._session_factory() as session:
             # Cleanup expired items first
             self._cleanup_expired_items(session)
-
+
             stmt = select(self._table)
-
+
             # Apply filters
             if query_filter:
                 conditions = self._build_query_conditions(query_filter, self._table)
                 if conditions is not None:
                     stmt = stmt.where(conditions)
-
+
             # Apply ordering
             if order_by:
                 column = getattr(self._table.c, order_by, None)
@@ -438,15 +433,15 @@ class Database(Generic[DatabaseItemType]):
             else:
                 # Default order by created_at desc
                 stmt = stmt.order_by(self._table.c.created_at.desc())
-
+
             # Apply pagination
             if offset > 0:
                 stmt = stmt.offset(offset)
             if limit is not None:
                 stmt = stmt.limit(limit)
-
+
             results = session.execute(stmt).fetchall()
-
+
             items = []
             for result in results:
                 # Double-check expiration (in case of race conditions)
@@ -454,29 +449,31 @@ class Database(Generic[DatabaseItemType]):
                     expires_at = result.created_at + timedelta(seconds=result.ttl)
                     if datetime.now(timezone.utc) >= expires_at:
                         continue
-
+
                 item_data = self._deserialize_item(result.item_data)
                 stored_filters = json.loads(result.filters or "{}")
-
-                items.append(DatabaseItem(
-                    id=result.id,
-                    item=item_data,
-                    created_at=result.created_at,
-                    updated_at=result.updated_at,
-                    ttl=result.ttl,
-                    filters=stored_filters,
-                    table_name=result.table_name,
-                ))
-
+
+                items.append(
+                    DatabaseItem(
+                        id=result.id,
+                        item=item_data,
+                        created_at=result.created_at,
+                        updated_at=result.updated_at,
+                        ttl=result.ttl,
+                        filters=stored_filters,
+                        table_name=result.table_name,
+                    )
+                )
+
             return items
 
     def delete(self, id: str) -> bool:
         """
         Delete an item by ID.
-
+
         Args:
             id: The item ID
-
+
         Returns:
             True if item was deleted, False if not found
         """
@@ -492,32 +489,33 @@ class Database(Generic[DatabaseItemType]):
     ) -> int:
         """
         Count items matching the filter.
-
+
         Args:
             query_filter: Filter conditions to apply
-
+
         Returns:
             Number of matching items
         """
         with self._session_factory() as session:
             # Cleanup expired items first
             self._cleanup_expired_items(session)
-
+
             from sqlalchemy import func
+
             stmt = select(func.count(self._table.c.id))
-
+
             if query_filter:
                 conditions = self._build_query_conditions(query_filter, self._table)
                 if conditions is not None:
                     stmt = stmt.where(conditions)
-
+
             result = session.execute(stmt).fetchone()
             return result[0] if result else 0
 
     def clear(self) -> int:
         """
         Clear all items from the database.
-
+
         Returns:
             Number of items deleted
         """
@@ -530,7 +528,7 @@ class Database(Generic[DatabaseItemType]):
     def cleanup_expired(self) -> int:
         """
         Manually cleanup expired items.
-
+
         Returns:
             Number of items cleaned up
         """
@@ -543,7 +541,7 @@ class Database(Generic[DatabaseItemType]):
         """String representation of the database."""
         location = str(self.path) if self.path else "memory"
         return f"<Database name='{self.name}' location='{location}' table='{self.table_name}'>"
-
+
 
 def create_database(
     name: str,
@@ -556,7 +554,7 @@ def create_database(
 ) -> Database[DatabaseItemType]:
     """
     Create a new database instance.
-
+
     Args:
         name: The name of the database
         schema: Optional schema type for validation
@@ -573,6 +571,6 @@ def create_database(
         schema=schema,
         ttl=ttl,
         path=path,
-        table_name=table_name,
+        table_name=table_name,
         auto_cleanup_expired=auto_cleanup_expired,
-    )
+    )
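The TTL handling that several of the hunks above reformat reduces to one rule: an item expires once `created_at` plus `ttl` seconds is in the past, checked both in SQL (`created_at + ttl * timedelta(seconds=1) < now`) and again in Python before a row is returned. A standalone sketch of the equivalent check; the helper name `is_expired` is illustrative and not part of the package:

```python
from datetime import datetime, timedelta, timezone


def is_expired(created_at: datetime, ttl: int | None) -> bool:
    """Mirror of the expiration check used in Database.get()/query()."""
    if ttl is None:
        return False  # no TTL means the item never expires
    return datetime.now(timezone.utc) >= created_at + timedelta(seconds=ttl)


# An item created 120 seconds ago with a 60-second TTL is already expired.
created = datetime.now(timezone.utc) - timedelta(seconds=120)
assert is_expired(created, ttl=60)
assert not is_expired(created, ttl=None)
```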