chuk-ai-session-manager 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- chuk_ai_session_manager/__init__.py +57 -0
- chuk_ai_session_manager/exceptions.py +129 -0
- chuk_ai_session_manager/infinite_conversation.py +316 -0
- chuk_ai_session_manager/models/__init__.py +44 -0
- chuk_ai_session_manager/models/event_source.py +8 -0
- chuk_ai_session_manager/models/event_type.py +9 -0
- chuk_ai_session_manager/models/session.py +316 -0
- chuk_ai_session_manager/models/session_event.py +166 -0
- chuk_ai_session_manager/models/session_metadata.py +37 -0
- chuk_ai_session_manager/models/session_run.py +115 -0
- chuk_ai_session_manager/models/token_usage.py +316 -0
- chuk_ai_session_manager/sample_tools.py +194 -0
- chuk_ai_session_manager/session_aware_tool_processor.py +178 -0
- chuk_ai_session_manager/session_prompt_builder.py +474 -0
- chuk_ai_session_manager/storage/__init__.py +44 -0
- chuk_ai_session_manager/storage/base.py +50 -0
- chuk_ai_session_manager/storage/providers/__init__.py +0 -0
- chuk_ai_session_manager/storage/providers/file.py +348 -0
- chuk_ai_session_manager/storage/providers/memory.py +96 -0
- chuk_ai_session_manager/storage/providers/redis.py +295 -0
- chuk_ai_session_manager-0.1.1.dist-info/METADATA +501 -0
- chuk_ai_session_manager-0.1.1.dist-info/RECORD +24 -0
- chuk_ai_session_manager-0.1.1.dist-info/WHEEL +5 -0
- chuk_ai_session_manager-0.1.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,348 @@
|
|
|
1
|
+
# chuk_ai_session_manager/storage/providers/file.py
|
|
2
|
+
|
|
3
|
+
"""
|
|
4
|
+
Async file-based session storage implementation with improved async semantics.
|
|
5
|
+
"""
|
|
6
|
+
import json
|
|
7
|
+
import logging
|
|
8
|
+
import asyncio
|
|
9
|
+
from datetime import datetime
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Any, Dict, List, Optional, Type, TypeVar, Union, Generic
|
|
12
|
+
import os
|
|
13
|
+
|
|
14
|
+
# Check for aiofiles availability
|
|
15
|
+
try:
|
|
16
|
+
import aiofiles
|
|
17
|
+
AIOFILES_AVAILABLE = True
|
|
18
|
+
except ImportError:
|
|
19
|
+
AIOFILES_AVAILABLE = False
|
|
20
|
+
logging.warning("aiofiles package not installed; falling back to synchronous I/O in thread pool.")
|
|
21
|
+
|
|
22
|
+
# session manager imports
|
|
23
|
+
from chuk_ai_session_manager.models.session import Session
|
|
24
|
+
from chuk_ai_session_manager.storage.base import SessionStoreInterface
|
|
25
|
+
from chuk_ai_session_manager.exceptions import SessionManagerError
|
|
26
|
+
|
|
27
|
+
# Type variable for serializable models
|
|
28
|
+
T = TypeVar('T', bound='Session')
|
|
29
|
+
|
|
30
|
+
# Setup logging
|
|
31
|
+
logger = logging.getLogger(__name__)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class FileStorageError(SessionManagerError):
    """Raised when a file-backed storage operation fails."""
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class SessionSerializer(Generic[T]):
    """Serializes session objects to and from plain dictionaries."""

    @classmethod
    def to_dict(cls, obj: T) -> Dict[str, Any]:
        """Serialize a session object into a plain dict via Pydantic's model_dump."""
        return obj.model_dump()

    @classmethod
    def from_dict(cls, data: Dict[str, Any], model_class: Type[T]) -> T:
        """Rebuild a session object of *model_class* from *data*.

        Raises:
            FileStorageError: If Pydantic validation/deserialization fails.
        """
        try:
            # Pydantic's model_validate performs the actual deserialization
            return model_class.model_validate(data)
        except Exception as e:
            raise FileStorageError(f"Failed to deserialize {model_class.__name__}: {str(e)}")
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class FileSessionStore(SessionStoreInterface, Generic[T]):
    """
    An async file session store that persists sessions to JSON files.

    Each session is stored as a separate JSON file named ``<session_id>.json``
    in the configured directory. When the optional ``aiofiles`` package is
    available it is used for non-blocking I/O; otherwise the blocking file
    operations are dispatched to the default thread-pool executor. A
    per-session ``asyncio.Lock`` serializes reads, writes and deletes of the
    same session file to prevent race conditions.
    """

    def __init__(self,
                 directory: Union[str, Path],
                 session_class: Type[T] = Session,
                 auto_save: bool = True):
        """
        Initialize the async file session store.

        Args:
            directory: Directory where session files will be stored.
                Created (including parents) if it does not already exist.
            session_class: The Session class to use for deserialization.
            auto_save: Whether to automatically persist on each save().
        """
        self.directory = Path(directory)
        self.directory.mkdir(parents=True, exist_ok=True)
        self.session_class = session_class
        self.auto_save = auto_save

        # In-memory cache for better performance
        self._cache: Dict[str, T] = {}

        # Locks for file operations (keyed by session ID)
        self._locks: Dict[str, asyncio.Lock] = {}

    def _get_path(self, session_id: str) -> Path:
        """Get the file path for a session ID."""
        return self.directory / f"{session_id}.json"

    def _json_default(self, obj: Any) -> Any:
        """Handle non-JSON-serializable objects (currently only datetime)."""
        if isinstance(obj, datetime):
            return obj.isoformat()
        raise TypeError(f"Object of type {type(obj)} is not JSON serializable")

    async def _get_lock(self, session_id: str) -> asyncio.Lock:
        """Get (lazily creating) the lock for a specific session ID."""
        if session_id not in self._locks:
            self._locks[session_id] = asyncio.Lock()
        return self._locks[session_id]

    @staticmethod
    def _read_text_sync(path: Path) -> str:
        """Blocking read helper, run via the executor when aiofiles is absent.

        Uses a context manager so the file handle is always closed (the
        previous fallback did ``open(path).read()`` and leaked the handle).
        """
        with open(path, 'r', encoding='utf-8') as f:
            return f.read()

    @staticmethod
    def _write_text_sync(path: Path, content: str) -> None:
        """Blocking write helper, run via the executor when aiofiles is absent."""
        with open(path, 'w', encoding='utf-8') as f:
            f.write(content)

    async def _read_file(self, path: Path) -> str:
        """Read a file's full contents without blocking the event loop."""
        if AIOFILES_AVAILABLE:
            async with aiofiles.open(path, 'r', encoding='utf-8') as f:
                return await f.read()
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, self._read_text_sync, path)

    async def _write_file(self, path: Path, content: str) -> None:
        """Write contents to a file without blocking the event loop."""
        if AIOFILES_AVAILABLE:
            async with aiofiles.open(path, 'w', encoding='utf-8') as f:
                await f.write(content)
        else:
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(None, self._write_text_sync, path, content)

    async def get(self, session_id: str) -> Optional[T]:
        """Async: Retrieve a session by its ID, or None if unavailable."""
        # Check cache first
        if session_id in self._cache:
            return self._cache[session_id]

        # If not in cache, try to load from file
        file_path = self._get_path(session_id)
        if not file_path.exists():
            return None

        # Use the per-session lock for the read to prevent race conditions
        lock = await self._get_lock(session_id)
        async with lock:
            try:
                data = json.loads(await self._read_file(file_path))
                session = SessionSerializer.from_dict(data, self.session_class)
                # Update cache
                self._cache[session_id] = session
                return session
            except (FileStorageError, json.JSONDecodeError, IOError) as e:
                # An unreadable or corrupt file is reported as "not found"
                logger.error(f"Failed to load session {session_id}: {e}")
                return None

    async def save(self, session: T) -> None:
        """Async: Save a session to the store (and to disk when auto_save)."""
        session_id = session.id
        # Update cache
        self._cache[session_id] = session

        if self.auto_save:
            await self._save_to_file(session)

    async def _save_to_file(self, session: T) -> None:
        """Async: Persist a session to its JSON file atomically.

        Raises:
            FileStorageError: If serialization or the write fails.
        """
        session_id = session.id
        file_path = self._get_path(session_id)

        # Use the per-session lock for the write to prevent race conditions
        lock = await self._get_lock(session_id)
        async with lock:
            # Write to a temp file first so readers never see partial data.
            # temp_path is assigned outside the try so the except block can
            # always reference it safely.
            temp_path = file_path.with_suffix('.tmp')
            try:
                data = SessionSerializer.to_dict(session)
                json_str = json.dumps(data, default=self._json_default, indent=2)

                await self._write_file(temp_path, json_str)

                # Rename temp file to actual file (atomic operation); run in
                # the executor since os.replace is a blocking syscall.
                loop = asyncio.get_running_loop()
                await loop.run_in_executor(None, os.replace, temp_path, file_path)
            except (FileStorageError, IOError, TypeError) as e:
                logger.error(f"Failed to save session {session_id}: {e}")
                if temp_path.exists():
                    temp_path.unlink()  # Clean up temp file
                raise FileStorageError(f"Failed to save session {session_id}: {str(e)}") from e

    async def delete(self, session_id: str) -> None:
        """Async: Delete a session from cache and disk by its ID.

        Raises:
            FileStorageError: If the session file cannot be removed.
        """
        # Remove from cache
        self._cache.pop(session_id, None)

        # Use the per-session lock for deletion to prevent race conditions
        lock = await self._get_lock(session_id)
        async with lock:
            # Remove file if it exists
            file_path = self._get_path(session_id)
            if file_path.exists():
                try:
                    # Run in executor to avoid blocking
                    loop = asyncio.get_running_loop()
                    await loop.run_in_executor(None, file_path.unlink)
                except IOError as e:
                    logger.error(f"Failed to delete session file {session_id}: {e}")
                    raise FileStorageError(f"Failed to delete session {session_id}: {str(e)}") from e

        # Drop the now-unneeded lock entry
        self._locks.pop(session_id, None)

    async def list_sessions(self, prefix: str = "") -> List[str]:
        """Async: List all session IDs, optionally filtered by prefix.

        Raises:
            FileStorageError: If the directory cannot be scanned.
        """
        try:
            # Run the directory scan in the executor to avoid blocking
            loop = asyncio.get_running_loop()
            files = await loop.run_in_executor(
                None,
                lambda: list(self.directory.glob("*.json"))
            )

            # The session ID is the filename without its extension
            session_ids = [f.stem for f in files]

            # Filter by prefix if provided
            if prefix:
                session_ids = [sid for sid in session_ids if sid.startswith(prefix)]

            return session_ids
        except IOError as e:
            logger.error(f"Failed to list sessions: {e}")
            raise FileStorageError(f"Failed to list sessions: {str(e)}") from e

    async def flush(self) -> None:
        """Async: Force save all cached sessions to disk."""
        # Snapshot the cache so a concurrent save() can't mutate the dict
        # while we iterate it.
        sessions = list(self._cache.values())
        if not sessions:
            return

        # Save concurrently; return_exceptions prevents one failure from
        # cancelling the remaining saves.
        results = await asyncio.gather(
            *(self._save_to_file(session) for session in sessions),
            return_exceptions=True,
        )

        # Log any errors
        for result in results:
            if isinstance(result, Exception):
                logger.error(f"Error during flush: {result}")

    async def clear_cache(self) -> None:
        """Async: Clear the in-memory cache (files on disk are untouched)."""
        self._cache.clear()

    async def vacuum(self) -> int:
        """
        Async: Remove orphaned temporary files and quarantine corrupt files.

        Leftover ``*.tmp`` files from interrupted writes are deleted, and
        ``*.json`` files that fail to parse are renamed with a ``.corrupt``
        suffix so they stop shadowing the session ID.

        Returns:
            Number of removed or quarantined files.

        Raises:
            FileStorageError: On unexpected failure during the scan.
        """
        count = 0

        try:
            loop = asyncio.get_running_loop()

            # Find and delete all leftover temp files
            temp_files = await loop.run_in_executor(
                None,
                lambda: list(self.directory.glob("*.tmp"))
            )
            for temp_file in temp_files:
                try:
                    await loop.run_in_executor(None, temp_file.unlink)
                    count += 1
                except IOError as e:
                    logger.error(f"Failed to delete temp file {temp_file}: {e}")

            # Check each session file for JSON corruption
            json_files = await loop.run_in_executor(
                None,
                lambda: list(self.directory.glob("*.json"))
            )
            for json_file in json_files:
                try:
                    # Parse only to validate; the result is discarded
                    json.loads(await self._read_file(json_file))
                except (json.JSONDecodeError, IOError) as e:
                    # File is corrupt: move it aside rather than deleting it
                    logger.warning(f"Found corrupt file {json_file}: {e}")
                    corrupt_path = json_file.with_suffix('.corrupt')
                    await loop.run_in_executor(None, os.rename, json_file, corrupt_path)
                    count += 1

            return count
        except Exception as e:
            logger.error(f"Error during vacuum: {e}")
            raise FileStorageError(f"Failed to vacuum storage: {str(e)}") from e
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
async def create_file_session_store(
    directory: Union[str, Path],
    session_class: Type[T] = Session,
    auto_save: bool = True
) -> FileSessionStore[T]:
    """
    Build and return an async file-based session store.

    Args:
        directory: Directory where session files will be stored
        session_class: The Session class to use
        auto_save: Whether to automatically save on each update

    Returns:
        A configured FileSessionStore
    """
    store = FileSessionStore(directory, session_class, auto_save)

    # Best-effort startup vacuum: clean leftover temp/corrupt files, but
    # never let cleanup failures prevent store creation.
    try:
        fixed_count = await store.vacuum()
    except Exception as e:
        logger.warning(f"Error during initial vacuum: {e}")
    else:
        if fixed_count > 0:
            logger.info(f"Cleaned up {fixed_count} temporary or corrupt files during store initialization")

    return store
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
# chuk_ai_session_manager/storage/providers/memory.py
|
|
2
|
+
"""
|
|
3
|
+
Async in-memory session storage implementation with improved async semantics.
|
|
4
|
+
"""
|
|
5
|
+
from typing import Any, Dict, List, Optional
|
|
6
|
+
import asyncio
|
|
7
|
+
from datetime import datetime
|
|
8
|
+
|
|
9
|
+
from chuk_ai_session_manager.storage.base import SessionStoreInterface
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class InMemorySessionStore(SessionStoreInterface):
    """Dictionary-backed, non-persistent session store with an async API.

    Sessions live only for the lifetime of the process. An ``asyncio.Lock``
    guards every mutating operation so concurrent coroutines cannot corrupt
    the underlying dict; pure reads are performed lock-free.
    """

    def __init__(self) -> None:
        """Create an empty store."""
        # session_id -> session object
        self._data: Dict[str, Any] = {}
        # Guards all mutations of self._data
        self._lock = asyncio.Lock()

    async def get(self, session_id: str) -> Optional[Any]:
        """Async: Return the session for *session_id*, or None if absent."""
        # Lock-free read
        return self._data.get(session_id)

    async def save(self, session: Any) -> None:
        """Async: Insert or replace *session* in the store."""
        async with self._lock:
            self._data[session.id] = session

            # Refresh the metadata timestamp when the session supports it
            metadata = getattr(session, 'metadata', None)
            if metadata is not None and hasattr(metadata, 'update_timestamp'):
                await metadata.update_timestamp()

    async def delete(self, session_id: str) -> None:
        """Async: Remove the session with *session_id* if it exists."""
        async with self._lock:
            self._data.pop(session_id, None)

    async def list_sessions(self, prefix: str = "") -> List[str]:
        """Async: Return all session IDs, optionally restricted to *prefix*."""
        # Lock-free read
        ids = list(self._data.keys())
        if not prefix:
            return ids
        return [sid for sid in ids if sid.startswith(prefix)]

    async def clear(self) -> None:
        """Async: Drop every session from the store."""
        async with self._lock:
            self._data.clear()

    async def get_by_property(self, key: str, value: Any) -> List[Any]:
        """
        Async: Find sessions whose metadata property *key* equals *value*.

        Args:
            key: The metadata property key to search for
            value: The value to match

        Returns:
            A list of matching sessions
        """
        matches: List[Any] = []
        for session in self._data.values():
            # Skip sessions that don't expose metadata properties at all
            if not hasattr(session, 'metadata'):
                continue
            if not hasattr(session.metadata, 'properties'):
                continue
            if session.metadata.properties.get(key) == value:
                matches.append(session)
        return matches

    async def get_by_state(self, key: str, value: Any) -> List[Any]:
        """
        Async: Find sessions whose state entry *key* equals *value*.

        Args:
            key: The state key to search for
            value: The value to match

        Returns:
            A list of matching sessions
        """
        matches: List[Any] = []
        for session in self._data.values():
            if not hasattr(session, 'state'):
                continue
            if session.state.get(key) == value:
                matches.append(session)
        return matches

    async def count(self) -> int:
        """Async: Return the number of stored sessions."""
        return len(self._data)
|