slide-narrator 1.0.0.tar.gz → 1.0.2.tar.gz

This diff compares publicly available package versions as released to their respective public registries. It is provided for informational purposes only and reflects the changes between those published versions.

Potentially problematic release.


This version of slide-narrator might be problematic.

Files changed (21)
  1. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/PKG-INFO +67 -14
  2. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/README.md +65 -12
  3. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/__init__.py +1 -1
  4. slide_narrator-1.0.2/narrator/database/cli.py +222 -0
  5. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/database/storage_backend.py +74 -34
  6. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/storage/file_store.py +1 -7
  7. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/utils/logging.py +18 -24
  8. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/pyproject.toml +3 -3
  9. slide_narrator-1.0.0/narrator/database/cli.py +0 -66
  10. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/.gitignore +0 -0
  11. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/LICENSE +0 -0
  12. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/database/__init__.py +0 -0
  13. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/database/migrations/__init__.py +0 -0
  14. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/database/models.py +0 -0
  15. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/database/thread_store.py +0 -0
  16. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/models/__init__.py +0 -0
  17. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/models/attachment.py +0 -0
  18. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/models/message.py +0 -0
  19. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/models/thread.py +0 -0
  20. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/storage/__init__.py +0 -0
  21. {slide_narrator-1.0.0 → slide_narrator-1.0.2}/narrator/utils/__init__.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: slide-narrator
3
- Version: 1.0.0
3
+ Version: 1.0.2
4
4
  Summary: Thread and file storage components for conversational AI - the companion to Tyler AI framework
5
5
  Project-URL: Homepage, https://github.com/adamwdraper/slide
6
6
  Project-URL: Repository, https://github.com/adamwdraper/slide
@@ -15,6 +15,7 @@ Classifier: Operating System :: OS Independent
15
15
  Classifier: Programming Language :: Python :: 3
16
16
  Classifier: Programming Language :: Python :: 3.11
17
17
  Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
18
19
  Requires-Python: >=3.11
19
20
  Requires-Dist: aiosqlite>=0.21.0
20
21
  Requires-Dist: alembic>=1.14.1
@@ -25,7 +26,6 @@ Requires-Dist: pydantic>=2.10.4
25
26
  Requires-Dist: pypdf>=5.3.0
26
27
  Requires-Dist: python-magic>=0.4.0
27
28
  Requires-Dist: sqlalchemy>=2.0.36
28
- Requires-Dist: uuid-utils>=0.10.0
29
29
  Provides-Extra: dev
30
30
  Requires-Dist: coverage>=7.6.10; extra == 'dev'
31
31
  Requires-Dist: pytest-asyncio>=0.25.2; extra == 'dev'
@@ -74,19 +74,72 @@ pip install slide-narrator
74
74
 
75
75
  ## Setup
76
76
 
77
+ ### Docker Setup (Recommended for Development)
78
+
79
+ For local development with PostgreSQL, Narrator includes built-in Docker commands:
80
+
81
+ ```bash
82
+ # One-command setup: starts PostgreSQL and initializes tables
83
+ uv run narrator docker-setup
84
+
85
+ # This will:
86
+ # 1. Start a PostgreSQL container
87
+ # 2. Wait for it to be ready
88
+ # 3. Initialize the database tables
89
+ # 4. Show you the connection string
90
+
91
+ # The database will be available at:
92
+ # postgresql+asyncpg://narrator:narrator_dev@localhost:5432/narrator
93
+ ```
94
+
95
+ To manage the Docker container:
96
+
97
+ ```bash
98
+ # Stop container (preserves data)
99
+ uv run narrator docker-stop
100
+
101
+ # Stop and remove all data
102
+ uv run narrator docker-stop --remove-volumes
103
+
104
+ # Start container again
105
+ uv run narrator docker-start
106
+
107
+ # Check database status
108
+ uv run narrator status
109
+ ```
110
+
111
+ For custom configurations, the Docker commands respect environment variables:
112
+
113
+ ```bash
114
+ # Use a different port
115
+ uv run narrator docker-setup --port 5433
116
+
117
+ # Or set environment variables (matching docker-compose.yml)
118
+ export NARRATOR_DB_NAME=mydb
119
+ export NARRATOR_DB_USER=myuser
120
+ export NARRATOR_DB_PASSWORD=mypassword
121
+ export NARRATOR_DB_PORT=5433
122
+
123
+ # Then run docker-setup
124
+ uv run narrator docker-setup
125
+
126
+ # This will create:
127
+ # postgresql+asyncpg://myuser:mypassword@localhost:5433/mydb
128
+ ```
129
+
77
130
  ### Database Setup
78
131
 
79
132
  For production use with PostgreSQL or SQLite persistence, you'll need to initialize the database tables:
80
133
 
81
134
  ```bash
82
135
  # Initialize database tables (PostgreSQL)
83
- narrator-db init --database-url "postgresql+asyncpg://user:password@localhost/dbname"
136
+ uv run narrator init --database-url "postgresql+asyncpg://user:password@localhost/dbname"
84
137
 
85
138
  # Initialize database tables (SQLite)
86
- narrator-db init --database-url "sqlite+aiosqlite:///path/to/your/database.db"
139
+ uv run narrator init --database-url "sqlite+aiosqlite:///path/to/your/database.db"
87
140
 
88
141
  # Check database status
89
- narrator-db status --database-url "postgresql+asyncpg://user:password@localhost/dbname"
142
+ uv run narrator status --database-url "postgresql+asyncpg://user:password@localhost/dbname"
90
143
  ```
91
144
 
92
145
  You can also use environment variables instead of passing the database URL:
@@ -96,8 +149,8 @@ You can also use environment variables instead of passing the database URL:
96
149
  export NARRATOR_DATABASE_URL="postgresql+asyncpg://user:password@localhost/dbname"
97
150
 
98
151
  # Then run without --database-url flag
99
- narrator-db init
100
- narrator-db status
152
+ uv run narrator init
153
+ uv run narrator status
101
154
  ```
102
155
 
103
156
  ### Environment Variables
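
Reviewer note: the Setup text above prints a connection string from `docker-setup` and documents `NARRATOR_DATABASE_URL`. As a minimal sketch (not part of the diff), this is how that URL would be consumed from Python, using the same `ThreadStore.create()` and `list_recent()` calls that the new CLI code in this release exercises:

```python
# Sketch only: assumes slide-narrator 1.0.2 is installed and the Docker
# database from `uv run narrator docker-setup` is running.
import asyncio
from narrator import ThreadStore

async def main():
    # The URL docker-setup prints (or read it from NARRATOR_DATABASE_URL).
    url = "postgresql+asyncpg://narrator:narrator_dev@localhost:5432/narrator"
    store = await ThreadStore.create(url)        # same call the CLI `init` command makes
    threads = await store.list_recent(limit=5)   # same call the CLI `status` command makes
    print(f"Connected; {len(threads)} recent threads")

asyncio.run(main())
```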
@@ -382,22 +435,22 @@ The Narrator includes a CLI tool for database management:
382
435
 
383
436
  ```bash
384
437
  # Initialize database tables
385
- narrator-db init --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
438
+ uv run narrator init --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
386
439
 
387
440
  # Initialize using environment variable
388
441
  export NARRATOR_DATABASE_URL="postgresql+asyncpg://user:pass@localhost/dbname"
389
- narrator-db init
442
+ uv run narrator init
390
443
 
391
444
  # Check database status
392
- narrator-db status --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
445
+ uv run narrator status --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
393
446
 
394
447
  # Check status using environment variable
395
- narrator-db status
448
+ uv run narrator status
396
449
  ```
397
450
 
398
451
  Available commands:
399
- - `narrator-db init` - Initialize database tables
400
- - `narrator-db status` - Check database connection and basic statistics
452
+ - `uv run narrator init` - Initialize database tables
453
+ - `uv run narrator status` - Check database connection and basic statistics
401
454
 
402
455
  ## Key Design Principles
403
456
 
@@ -480,7 +533,7 @@ uv run pytest tests/ -v
480
533
  ### Test Requirements
481
534
 
482
535
  The test suite requires:
483
- - Python 3.12+
536
+ - Python 3.13+
484
537
  - pytest with async support
485
538
  - Test coverage reporting
486
539
  - System dependencies (libmagic for file type detection)
@@ -39,19 +39,72 @@ pip install slide-narrator
39
39
 
40
40
  ## Setup
41
41
 
42
+ ### Docker Setup (Recommended for Development)
43
+
44
+ For local development with PostgreSQL, Narrator includes built-in Docker commands:
45
+
46
+ ```bash
47
+ # One-command setup: starts PostgreSQL and initializes tables
48
+ uv run narrator docker-setup
49
+
50
+ # This will:
51
+ # 1. Start a PostgreSQL container
52
+ # 2. Wait for it to be ready
53
+ # 3. Initialize the database tables
54
+ # 4. Show you the connection string
55
+
56
+ # The database will be available at:
57
+ # postgresql+asyncpg://narrator:narrator_dev@localhost:5432/narrator
58
+ ```
59
+
60
+ To manage the Docker container:
61
+
62
+ ```bash
63
+ # Stop container (preserves data)
64
+ uv run narrator docker-stop
65
+
66
+ # Stop and remove all data
67
+ uv run narrator docker-stop --remove-volumes
68
+
69
+ # Start container again
70
+ uv run narrator docker-start
71
+
72
+ # Check database status
73
+ uv run narrator status
74
+ ```
75
+
76
+ For custom configurations, the Docker commands respect environment variables:
77
+
78
+ ```bash
79
+ # Use a different port
80
+ uv run narrator docker-setup --port 5433
81
+
82
+ # Or set environment variables (matching docker-compose.yml)
83
+ export NARRATOR_DB_NAME=mydb
84
+ export NARRATOR_DB_USER=myuser
85
+ export NARRATOR_DB_PASSWORD=mypassword
86
+ export NARRATOR_DB_PORT=5433
87
+
88
+ # Then run docker-setup
89
+ uv run narrator docker-setup
90
+
91
+ # This will create:
92
+ # postgresql+asyncpg://myuser:mypassword@localhost:5433/mydb
93
+ ```
94
+
42
95
  ### Database Setup
43
96
 
44
97
  For production use with PostgreSQL or SQLite persistence, you'll need to initialize the database tables:
45
98
 
46
99
  ```bash
47
100
  # Initialize database tables (PostgreSQL)
48
- narrator-db init --database-url "postgresql+asyncpg://user:password@localhost/dbname"
101
+ uv run narrator init --database-url "postgresql+asyncpg://user:password@localhost/dbname"
49
102
 
50
103
  # Initialize database tables (SQLite)
51
- narrator-db init --database-url "sqlite+aiosqlite:///path/to/your/database.db"
104
+ uv run narrator init --database-url "sqlite+aiosqlite:///path/to/your/database.db"
52
105
 
53
106
  # Check database status
54
- narrator-db status --database-url "postgresql+asyncpg://user:password@localhost/dbname"
107
+ uv run narrator status --database-url "postgresql+asyncpg://user:password@localhost/dbname"
55
108
  ```
56
109
 
57
110
  You can also use environment variables instead of passing the database URL:
@@ -61,8 +114,8 @@ You can also use environment variables instead of passing the database URL:
61
114
  export NARRATOR_DATABASE_URL="postgresql+asyncpg://user:password@localhost/dbname"
62
115
 
63
116
  # Then run without --database-url flag
64
- narrator-db init
65
- narrator-db status
117
+ uv run narrator init
118
+ uv run narrator status
66
119
  ```
67
120
 
68
121
  ### Environment Variables
@@ -347,22 +400,22 @@ The Narrator includes a CLI tool for database management:
347
400
 
348
401
  ```bash
349
402
  # Initialize database tables
350
- narrator-db init --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
403
+ uv run narrator init --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
351
404
 
352
405
  # Initialize using environment variable
353
406
  export NARRATOR_DATABASE_URL="postgresql+asyncpg://user:pass@localhost/dbname"
354
- narrator-db init
407
+ uv run narrator init
355
408
 
356
409
  # Check database status
357
- narrator-db status --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
410
+ uv run narrator status --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
358
411
 
359
412
  # Check status using environment variable
360
- narrator-db status
413
+ uv run narrator status
361
414
  ```
362
415
 
363
416
  Available commands:
364
- - `narrator-db init` - Initialize database tables
365
- - `narrator-db status` - Check database connection and basic statistics
417
+ - `uv run narrator init` - Initialize database tables
418
+ - `uv run narrator status` - Check database connection and basic statistics
366
419
 
367
420
  ## Key Design Principles
368
421
 
@@ -445,7 +498,7 @@ uv run pytest tests/ -v
445
498
  ### Test Requirements
446
499
 
447
500
  The test suite requires:
448
- - Python 3.12+
501
+ - Python 3.13+
449
502
  - pytest with async support
450
503
  - Test coverage reporting
451
504
  - System dependencies (libmagic for file type detection)
@@ -8,7 +8,7 @@ from .models.thread import Thread
8
8
  from .models.message import Message
9
9
  from .models.attachment import Attachment
10
10
 
11
- __version__ = "1.0.0"
11
+ __version__ = "1.0.2"
12
12
  __all__ = [
13
13
  "ThreadStore",
14
14
  "FileStore",
@@ -0,0 +1,222 @@
1
+ """Database CLI for Tyler Stores"""
2
+ import asyncio
3
+ import os
4
+ import click
5
+ import functools
6
+ import subprocess
7
+ import tempfile
8
+ import time
9
+ from pathlib import Path
10
+ from .thread_store import ThreadStore
11
+ from ..utils.logging import get_logger
12
+
13
+ logger = get_logger(__name__)
14
+
15
+ @click.group()
16
+ def main():
17
+ """Narrator CLI - Database management commands"""
18
+ pass
19
+
20
+ @main.command()
21
+ @click.option('--database-url', help='Database URL for initialization')
22
+ def init(database_url):
23
+ """Initialize database tables"""
24
+ async def _init():
25
+ try:
26
+ # Use provided URL or check environment variable
27
+ url = database_url or os.environ.get('NARRATOR_DATABASE_URL')
28
+
29
+ if url:
30
+ store = await ThreadStore.create(url)
31
+ else:
32
+ # Use in-memory storage
33
+ store = await ThreadStore.create()
34
+
35
+ logger.info("Database initialized successfully")
36
+ click.echo("Database initialized successfully")
37
+ except Exception as e:
38
+ logger.error(f"Failed to initialize database: {e}")
39
+ click.echo(f"Error: Failed to initialize database: {e}")
40
+ raise click.Abort()
41
+
42
+ asyncio.run(_init())
43
+
44
+ @main.command()
45
+ @click.option('--database-url', help='Database URL')
46
+ def status(database_url):
47
+ """Check database status"""
48
+ async def _status():
49
+ try:
50
+ # Use provided URL or check environment variable
51
+ url = database_url or os.environ.get('NARRATOR_DATABASE_URL')
52
+
53
+ if url:
54
+ store = await ThreadStore.create(url)
55
+ else:
56
+ store = await ThreadStore.create()
57
+
58
+ # Get some basic stats
59
+ threads = await store.list_recent(limit=5)
60
+ click.echo(f"Database connection: OK")
61
+ click.echo(f"Recent threads count: {len(threads)}")
62
+
63
+ except Exception as e:
64
+ logger.error(f"Database status check failed: {e}")
65
+ click.echo(f"Error: Database status check failed: {e}")
66
+ raise click.Abort()
67
+
68
+ asyncio.run(_status())
69
+
70
+ @main.command()
71
+ @click.option('--port', help='Port to expose PostgreSQL on (default: 5432 or NARRATOR_DB_PORT)')
72
+ @click.option('--detach/--no-detach', default=True, help='Run container in background (default: True)')
73
+ def docker_start(port, detach):
74
+ """Start a PostgreSQL container for Narrator"""
75
+ # Use environment variables with defaults matching docker-compose.yml
76
+ db_name = os.environ.get('NARRATOR_DB_NAME', 'narrator')
77
+ db_user = os.environ.get('NARRATOR_DB_USER', 'narrator')
78
+ db_password = os.environ.get('NARRATOR_DB_PASSWORD', 'narrator_dev')
79
+ db_port = port or os.environ.get('NARRATOR_DB_PORT', '5432')
80
+
81
+ docker_compose_content = f"""services:
82
+ postgres:
83
+ image: postgres:16
84
+ container_name: narrator-postgres
85
+ environment:
86
+ POSTGRES_DB: {db_name}
87
+ POSTGRES_USER: {db_user}
88
+ POSTGRES_PASSWORD: {db_password}
89
+ ports:
90
+ - "{db_port}:5432"
91
+ volumes:
92
+ - narrator_postgres_data:/var/lib/postgresql/data
93
+ healthcheck:
94
+ test: ["CMD-SHELL", "pg_isready -U {db_user}"]
95
+ interval: 5s
96
+ timeout: 5s
97
+ retries: 5
98
+
99
+ volumes:
100
+ narrator_postgres_data:
101
+ """
102
+
103
+ # Create a temporary directory for docker-compose.yml
104
+ with tempfile.TemporaryDirectory() as tmpdir:
105
+ compose_file = Path(tmpdir) / "docker-compose.yml"
106
+ compose_file.write_text(docker_compose_content)
107
+
108
+ # Check if docker is available
109
+ try:
110
+ subprocess.run(["docker", "--version"], capture_output=True, check=True)
111
+ except (subprocess.CalledProcessError, FileNotFoundError):
112
+ click.echo("❌ Docker is not installed or not available in PATH")
113
+ raise click.Abort()
114
+
115
+ # Check if docker-compose or docker compose is available
116
+ compose_cmd = None
117
+ try:
118
+ subprocess.run(["docker", "compose", "version"], capture_output=True, check=True)
119
+ compose_cmd = ["docker", "compose"]
120
+ except (subprocess.CalledProcessError, FileNotFoundError):
121
+ try:
122
+ subprocess.run(["docker-compose", "version"], capture_output=True, check=True)
123
+ compose_cmd = ["docker-compose"]
124
+ except (subprocess.CalledProcessError, FileNotFoundError):
125
+ click.echo("❌ Docker Compose is not installed")
126
+ raise click.Abort()
127
+
128
+ # Start the container
129
+ click.echo("📦 Starting PostgreSQL container...")
130
+ cmd = compose_cmd + ["up"]
131
+ if detach:
132
+ cmd.append("-d")
133
+
134
+ result = subprocess.run(cmd, cwd=tmpdir)
135
+
136
+ if result.returncode != 0:
137
+ click.echo("❌ Failed to start PostgreSQL container")
138
+ raise click.Abort()
139
+
140
+ if detach:
141
+ # Wait for PostgreSQL to be ready
142
+ click.echo("⏳ Waiting for PostgreSQL to be ready...")
143
+ for i in range(30):
144
+ result = subprocess.run(
145
+ ["docker", "exec", "narrator-postgres", "pg_isready", "-U", db_user],
146
+ capture_output=True
147
+ )
148
+ if result.returncode == 0:
149
+ click.echo("✅ PostgreSQL is ready!")
150
+ click.echo(f"\n🎉 Database available at:")
151
+ click.echo(f" postgresql+asyncpg://{db_user}:{db_password}@localhost:{db_port}/{db_name}")
152
+ return
153
+ time.sleep(1)
154
+
155
+ click.echo("❌ PostgreSQL failed to start after 30 seconds")
156
+ raise click.Abort()
157
+
158
+ @main.command()
159
+ @click.option('--remove-volumes', is_flag=True, help='Remove data volumes (destroys all data)')
160
+ def docker_stop(remove_volumes):
161
+ """Stop the PostgreSQL container"""
162
+ # Check if docker is available
163
+ try:
164
+ subprocess.run(["docker", "--version"], capture_output=True, check=True)
165
+ except (subprocess.CalledProcessError, FileNotFoundError):
166
+ click.echo("❌ Docker is not installed or not available in PATH")
167
+ raise click.Abort()
168
+
169
+ # Check if container exists
170
+ result = subprocess.run(
171
+ ["docker", "ps", "-a", "--format", "{{.Names}}"],
172
+ capture_output=True,
173
+ text=True
174
+ )
175
+
176
+ if "narrator-postgres" not in result.stdout:
177
+ click.echo("ℹ️ No Narrator PostgreSQL container found")
178
+ return
179
+
180
+ click.echo("🛑 Stopping PostgreSQL container...")
181
+
182
+ # Stop the container
183
+ subprocess.run(["docker", "stop", "narrator-postgres"], check=False)
184
+ subprocess.run(["docker", "rm", "narrator-postgres"], check=False)
185
+
186
+ if remove_volumes:
187
+ click.echo("🗑️ Removing data volume...")
188
+ subprocess.run(["docker", "volume", "rm", "narrator_postgres_data"], check=False)
189
+ click.echo("✅ Container and data removed")
190
+ else:
191
+ click.echo("✅ Container stopped (data preserved)")
192
+
193
+ @main.command()
194
+ @click.option('--port', help='Port to expose PostgreSQL on (default: 5432 or NARRATOR_DB_PORT)')
195
+ def docker_setup(port):
196
+ """One-command Docker setup: start PostgreSQL and initialize tables"""
197
+ # Start PostgreSQL
198
+ ctx = click.get_current_context()
199
+ ctx.invoke(docker_start, port=port, detach=True)
200
+
201
+ # Get database configuration from environment or defaults
202
+ db_name = os.environ.get('NARRATOR_DB_NAME', 'narrator')
203
+ db_user = os.environ.get('NARRATOR_DB_USER', 'narrator')
204
+ db_password = os.environ.get('NARRATOR_DB_PASSWORD', 'narrator_dev')
205
+ db_port = port or os.environ.get('NARRATOR_DB_PORT', '5432')
206
+
207
+ # Set up database URL
208
+ database_url = f"postgresql+asyncpg://{db_user}:{db_password}@localhost:{db_port}/{db_name}"
209
+ os.environ['NARRATOR_DATABASE_URL'] = database_url
210
+
211
+ # Initialize tables
212
+ click.echo("\n🔧 Initializing database tables...")
213
+ ctx.invoke(init, database_url=database_url)
214
+
215
+ click.echo("\n🎉 Setup complete! Your database is ready.")
216
+ click.echo("\nTo use in your code:")
217
+ click.echo(f'export NARRATOR_DATABASE_URL="{database_url}"')
218
+ click.echo("\nTo stop the container: narrator docker-stop")
219
+ click.echo("To remove all data: narrator docker-stop --remove-volumes")
220
+
221
+ if __name__ == '__main__':
222
+ main()
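
Reviewer note: the new CLI is a plain Click group, so it can be exercised in-process rather than via `uv run narrator ...`. A minimal sketch using Click's standard test runner (names as shown in the hunk above; the SQLite URL is only an example and will create a local file):

```python
from click.testing import CliRunner
from narrator.database.cli import main

runner = CliRunner()
result = runner.invoke(main, ["status", "--database-url", "sqlite+aiosqlite:///test.db"])
print(result.exit_code)   # 0 on success
print(result.output)      # "Database connection: OK" plus a recent-thread count
```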
@@ -1,6 +1,7 @@
1
1
  """Storage backend implementations for ThreadStore."""
2
2
  from abc import ABC, abstractmethod
3
3
  from typing import List, Optional, Dict, Any, Union
4
+ import re
4
5
  from datetime import datetime, UTC
5
6
  import json
6
7
  import os
@@ -20,6 +21,12 @@ from .models import Base, ThreadRecord, MessageRecord
20
21
 
21
22
  logger = get_logger(__name__)
22
23
 
24
+ def _sanitize_key(component: str) -> str:
25
+ """Allow only alphanumeric and underscore for JSON path keys to avoid SQL injection."""
26
+ if not re.fullmatch(r"[A-Za-z0-9_]+", component):
27
+ raise ValueError(f"Invalid key component: {component}")
28
+ return component
29
+
23
30
  class StorageBackend(ABC):
24
31
  """Abstract base class for thread storage backends."""
25
32
 
@@ -149,28 +156,22 @@ class MemoryBackend(StorageBackend):
149
156
  Returns:
150
157
  List of messages matching the criteria (possibly empty)
151
158
  """
159
+ matches: List[Message] = []
152
160
  # Traverse all threads and messages
153
161
  for thread in self._threads.values():
154
162
  for message in thread.messages:
155
- # Use the path to navigate to the target attribute
156
- current = message.model_dump(mode="python")
157
-
163
+ current: Any = message.model_dump(mode="python")
158
164
  # Navigate the nested structure
159
- parts = path.split('.')
165
+ parts = [p for p in path.split('.') if p]
160
166
  for part in parts:
161
167
  if isinstance(current, dict) and part in current:
162
168
  current = current[part]
163
169
  else:
164
170
  current = None
165
171
  break
166
-
167
- # Check if we found a match
168
172
  if current == value:
169
- # For MemoryBackend, we can't return MessageRecord objects
170
- # Return a list containing the message data that the ThreadStore can handle
171
- return [message]
172
-
173
- return []
173
+ matches.append(message)
174
+ return matches
174
175
 
175
176
  class SQLBackend(StorageBackend):
176
177
  """SQL storage backend supporting both SQLite and PostgreSQL with proper connection pooling."""
@@ -437,7 +438,17 @@ class SQLBackend(StorageBackend):
437
438
  for key, value in attributes.items():
438
439
  if self.database_url.startswith('sqlite'):
439
440
  # Use SQLite json_extract
440
- query = query.where(text(f"json_extract(attributes, '$.{key}') = :value").bindparams(value=str(value)))
441
+ safe_key = _sanitize_key(key)
442
+ if value is None:
443
+ query = query.where(text(f"json_extract(attributes, '$.{safe_key}') IS NULL"))
444
+ elif isinstance(value, bool):
445
+ # SQLite stores booleans as 1/0
446
+ num_val = 1 if value else 0
447
+ query = query.where(text(f"json_extract(attributes, '$.{safe_key}') = {num_val}"))
448
+ else:
449
+ query = query.where(
450
+ text(f"json_extract(attributes, '$.{safe_key}') = :value").bindparams(value=str(value))
451
+ )
441
452
  else:
442
453
  # Use PostgreSQL JSONB operators via text() for direct SQL control
443
454
  logger.info(f"Searching for attribute[{key}] = {value} (type: {type(value)})")
@@ -445,7 +456,8 @@ class SQLBackend(StorageBackend):
445
456
  # Handle different value types appropriately
446
457
  if value is None:
447
458
  # Check for null/None values
448
- query = query.where(text(f"attributes->>'{key}' IS NULL"))
459
+ safe_key = _sanitize_key(key)
460
+ query = query.where(text(f"attributes->>'{safe_key}' IS NULL"))
449
461
  else:
450
462
  # Convert value to string for text comparison
451
463
  str_value = str(value)
@@ -454,10 +466,11 @@ class SQLBackend(StorageBackend):
454
466
  str_value = str(value).lower()
455
467
 
456
468
  # Use PostgreSQL's JSONB operators for direct string comparison
457
- param_name = f"attr_{key}"
469
+ safe_key = _sanitize_key(key)
470
+ param_name = f"attr_{safe_key}"
458
471
  bp = bindparam(param_name, str_value)
459
472
  query = query.where(
460
- text(f"attributes->>'{key}' = :{param_name}").bindparams(bp)
473
+ text(f"attributes->>'{safe_key}' = :{param_name}").bindparams(bp)
461
474
  )
462
475
 
463
476
  # Log the final query for debugging
@@ -481,28 +494,38 @@ class SQLBackend(StorageBackend):
481
494
 
482
495
  if self.database_url.startswith('sqlite'):
483
496
  # Use SQLite json_extract for platform name
484
- query = query.where(text(f"json_extract(platforms, '$.{platform_name}') IS NOT NULL"))
497
+ safe_platform = _sanitize_key(platform_name)
498
+ query = query.where(text(f"json_extract(platforms, '$.{safe_platform}') IS NOT NULL"))
485
499
  # Add property conditions
486
500
  for key, value in properties.items():
487
501
  # Convert value to string for text comparison
488
- str_value = str(value)
489
- param_name = f"value_{platform_name}_{key}" # Ensure unique param name
490
- bp = bindparam(param_name, str_value)
491
- query = query.where(
492
- text(f"json_extract(platforms, '$.{platform_name}.{key}') = :{param_name}")
493
- .bindparams(bp)
494
- )
502
+ safe_key = _sanitize_key(key)
503
+ if value is None:
504
+ query = query.where(text(f"json_extract(platforms, '$.{safe_platform}.{safe_key}') IS NULL"))
505
+ elif isinstance(value, bool):
506
+ num_val = 1 if value else 0
507
+ query = query.where(text(f"json_extract(platforms, '$.{safe_platform}.{safe_key}') = {num_val}"))
508
+ else:
509
+ str_value = str(value)
510
+ param_name = f"value_{safe_platform}_{safe_key}" # Ensure unique param name
511
+ bp = bindparam(param_name, str_value)
512
+ query = query.where(
513
+ text(f"json_extract(platforms, '$.{safe_platform}.{safe_key}') = :{param_name}")
514
+ .bindparams(bp)
515
+ )
495
516
  else:
496
517
  # Use PostgreSQL JSONB operators for platform checks
497
- query = query.where(text(f"platforms ? '{platform_name}'"))
518
+ safe_platform = _sanitize_key(platform_name)
519
+ query = query.where(text(f"platforms ? '{safe_platform}'"))
498
520
 
499
521
  # Add property conditions with text() for proper PostgreSQL JSONB syntax
500
522
  for key, value in properties.items():
501
523
  str_value = str(value)
502
- param_name = f"value_{platform_name}_{key}"
524
+ safe_key = _sanitize_key(key)
525
+ param_name = f"value_{safe_platform}_{safe_key}"
503
526
  bp = bindparam(param_name, str_value)
504
527
  query = query.where(
505
- text(f"platforms->'{platform_name}'->>'{key}' = :{param_name}")
528
+ text(f"platforms->'{safe_platform}'->>'{safe_key}' = :{param_name}")
506
529
  .bindparams(bp)
507
530
  )
508
531
 
@@ -540,16 +563,33 @@ class SQLBackend(StorageBackend):
540
563
  try:
541
564
  query = select(MessageRecord)
542
565
 
566
+ # Normalize and sanitize path parts
567
+ parts = [p for p in path.split('.') if p]
568
+ parts = [_sanitize_key(p) for p in parts]
569
+ if not parts:
570
+ return []
571
+ # Support paths prefixed with 'source.' by stripping the leading component
572
+ if parts and parts[0] == 'source':
573
+ parts = parts[1:]
574
+ if not parts:
575
+ return []
543
576
  if self.database_url.startswith('sqlite'):
544
- # Use SQLite json_extract
545
- json_path = '$.' + path.replace('.', '.')
546
- query = query.where(text(f"json_extract(source, '{json_path}') = :value").bindparams(value=str(value)))
577
+ # Use SQLite json_extract with a proper JSON path: $.a.b.c (safe due to sanitized parts)
578
+ json_path = '$.' + '.'.join(parts)
579
+ query = query.where(
580
+ text(f"json_extract(source, '{json_path}') = :value").bindparams(value=str(value))
581
+ )
547
582
  else:
548
- # Use PostgreSQL JSONB operators
549
- # Convert dot notation to PostgreSQL JSON path
550
- path_parts = path.split('.')
551
- json_path = '->'.join([f"'{part}'" for part in path_parts[:-1]]) + f"->>'{path_parts[-1]}'"
552
- query = query.where(text(f"source{json_path} = :value").bindparams(value=str(value)))
583
+ # Use PostgreSQL JSONB operators: source->'a'->'b'->>'c' (last part text)
584
+ if len(parts) == 1:
585
+ pg_expr = f"source->>'{parts[0]}'"
586
+ else:
587
+ head = parts[:-1]
588
+ tail = parts[-1]
589
+ pg_expr = "source" + ''.join([f"->'{h}'" for h in head]) + f"->>'{tail}'"
590
+ query = query.where(
591
+ text(f"{pg_expr} = :value").bindparams(value=str(value))
592
+ )
553
593
 
554
594
  result = await session.execute(query)
555
595
  return result.scalars().all()
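
Reviewer note: to make the rewritten path handling concrete, these are the expressions the code above produces for `path="slack.ts"` (or `"source.slack.ts"`, since the leading `source.` is stripped). Sketch, not part of the diff:

```python
parts = ["slack", "ts"]   # after splitting, sanitizing, and dropping a leading "source"

sqlite_expr = "json_extract(source, '$." + ".".join(parts) + "') = :value"
# -> json_extract(source, '$.slack.ts') = :value

pg_expr = "source" + "".join(f"->'{p}'" for p in parts[:-1]) + f"->>'{parts[-1]}' = :value"
# -> source->'slack'->>'ts' = :value
```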
@@ -494,13 +494,7 @@ class FileStore:
494
494
  files.append(file_id)
495
495
  return files
496
496
 
497
- def _handle_data_url(self, content: bytes) -> bytes:
498
- """Handle data URLs"""
499
- if self.content.startswith('data:'):
500
- # Handle data URLs
501
- header, encoded = self.content.split(",", 1)
502
- return base64.b64decode(encoded)
503
- return content
497
+ # Note: data URL handling is performed at the Attachment layer where the content type is known.
504
498
 
505
499
  @classmethod
506
500
  def get_base_path(cls) -> str:
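
Reviewer note: the removed `_handle_data_url` helper was effectively dead code (it read `self.content` even though it received `content` as a parameter), and the replacement comment says data-URL handling now lives at the Attachment layer. For reference, a hypothetical standalone form of the logic it described (sketch only):

```python
import base64

def decode_data_url(content: str) -> bytes:
    """Decode a data: URL of the form data:<mime>;base64,<payload> into raw bytes."""
    if content.startswith("data:"):
        header, encoded = content.split(",", 1)
        return base64.b64decode(encoded)
    return content.encode()
```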
@@ -3,36 +3,30 @@ import os
3
3
  import logging
4
4
  from typing import Optional
5
5
 
6
+ class _NarratorNullHandler(logging.Handler):
7
+ def emit(self, record):
8
+ pass
9
+
6
10
  _is_configured = False
7
11
 
8
12
  def _ensure_logging_configured():
9
- """Internal function to configure logging if not already configured."""
13
+ """Attach a NullHandler and optionally set level based on env without overriding app config."""
10
14
  global _is_configured
11
15
  if _is_configured:
12
16
  return
13
17
 
14
- # Get log level from environment and convert to uppercase
15
- log_level_str = os.getenv('NARRATOR_LOG_LEVEL', os.getenv('LOG_LEVEL', 'INFO')).upper()
16
-
17
- # Convert string to logging level constant
18
- try:
19
- log_level = getattr(logging, log_level_str)
20
- except AttributeError:
21
- print(f"Invalid LOG_LEVEL: {log_level_str}. Defaulting to INFO.")
22
- log_level = logging.INFO
23
-
24
- # Configure the root logger with our format
25
- logging.basicConfig(
26
- level=log_level,
27
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
28
- datefmt='%H:%M:%S',
29
- force=True # Ensure we override any existing configuration
30
- )
31
-
32
- # Get the root logger and set its level
33
- root_logger = logging.getLogger()
34
- root_logger.setLevel(log_level)
35
-
18
+ logger = logging.getLogger('narrator')
19
+ # Avoid duplicate handlers
20
+ if not any(isinstance(h, _NarratorNullHandler) for h in logger.handlers):
21
+ logger.addHandler(_NarratorNullHandler())
22
+
23
+ # Respect env level but do not call basicConfig or force reconfigure
24
+ log_level_str = os.getenv('NARRATOR_LOG_LEVEL', os.getenv('LOG_LEVEL', '')).upper()
25
+ if log_level_str:
26
+ level = getattr(logging, log_level_str, None)
27
+ if isinstance(level, int):
28
+ logger.setLevel(level)
29
+
36
30
  _is_configured = True
37
31
 
38
32
  def get_logger(name: Optional[str] = None) -> logging.Logger:
@@ -55,4 +49,4 @@ def get_logger(name: Optional[str] = None) -> logging.Logger:
55
49
  logger.debug("Debug message") # Will respect NARRATOR_LOG_LEVEL from .env
56
50
  """
57
51
  _ensure_logging_configured()
58
- return logging.getLogger(name or '__name__')
52
+ return logging.getLogger(name or 'narrator.unknown')
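
Reviewer note: with this change the library only attaches a no-op handler to the `narrator` logger and no longer calls `basicConfig(force=True)`, so log output is now the consuming application's decision. A minimal sketch of opting back in (the format string is the one the old code used):

```python
import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logging.getLogger("narrator").setLevel(logging.DEBUG)  # opt in to library debug logs
```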
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
4
4
 
5
5
  [project]
6
6
  name = "slide-narrator"
7
- version = "1.0.0"
7
+ version = "1.0.2"
8
8
  description = "Thread and file storage components for conversational AI - the companion to Tyler AI framework"
9
9
  readme = "README.md"
10
10
  requires-python = ">=3.11"
@@ -18,6 +18,7 @@ classifiers = [
18
18
  "Programming Language :: Python :: 3",
19
19
  "Programming Language :: Python :: 3.11",
20
20
  "Programming Language :: Python :: 3.12",
21
+ "Programming Language :: Python :: 3.13",
21
22
  "Operating System :: OS Independent",
22
23
  "License :: OSI Approved :: MIT License",
23
24
  ]
@@ -28,7 +29,6 @@ dependencies = [
28
29
  "aiosqlite>=0.21.0",
29
30
  "python-magic>=0.4.0",
30
31
  "pydantic>=2.10.4",
31
- "uuid_utils>=0.10.0",
32
32
  "pypdf>=5.3.0",
33
33
  "click>=8.1.8",
34
34
  "greenlet>=3.2.3",
@@ -48,7 +48,7 @@ Repository = "https://github.com/adamwdraper/slide"
48
48
  "Bug Tracker" = "https://github.com/adamwdraper/slide/issues"
49
49
 
50
50
  [project.scripts]
51
- narrator-db = "narrator.database.cli:main"
51
+ narrator = "narrator.database.cli:main"
52
52
 
53
53
  [tool.uv.sources]
54
54
  # No workspace dependencies for narrator currently
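
Reviewer note: renaming the console script means the old `narrator-db` entry point disappears after upgrading, so anything that calls it (CI scripts, docs, cron jobs) needs updating. Sketch of the before/after on the command line:

```bash
pip install --upgrade slide-narrator
narrator --help       # 1.0.2 entry point -> narrator.database.cli:main
narrator-db --help    # removed in 1.0.2; expect "command not found"
```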
@@ -1,66 +0,0 @@
1
- """Database CLI for Tyler Stores"""
2
- import asyncio
3
- import click
4
- from .thread_store import ThreadStore
5
- from ..utils.logging import get_logger
6
-
7
- logger = get_logger(__name__)
8
-
9
- @click.group()
10
- def main():
11
- """Tyler Stores Database CLI"""
12
- pass
13
-
14
- @click.command()
15
- @click.option('--database-url', help='Database URL for initialization')
16
- async def init(database_url):
17
- """Initialize database tables"""
18
- try:
19
- if database_url:
20
- store = await ThreadStore.create(database_url)
21
- else:
22
- # Use environment variables or default
23
- store = await ThreadStore.create()
24
-
25
- logger.info("Database initialized successfully")
26
- click.echo("Database initialized successfully")
27
- except Exception as e:
28
- logger.error(f"Failed to initialize database: {e}")
29
- click.echo(f"Error: Failed to initialize database: {e}")
30
- raise click.Abort()
31
-
32
- @click.command()
33
- @click.option('--database-url', help='Database URL')
34
- async def status(database_url):
35
- """Check database status"""
36
- try:
37
- if database_url:
38
- store = await ThreadStore.create(database_url)
39
- else:
40
- store = await ThreadStore.create()
41
-
42
- # Get some basic stats
43
- threads = await store.list_recent(limit=5)
44
- click.echo(f"Database connection: OK")
45
- click.echo(f"Recent threads count: {len(threads)}")
46
-
47
- except Exception as e:
48
- logger.error(f"Database status check failed: {e}")
49
- click.echo(f"Error: Database status check failed: {e}")
50
- raise click.Abort()
51
-
52
- # Add async wrapper for commands
53
- def async_command(f):
54
- def wrapper(*args, **kwargs):
55
- return asyncio.run(f(*args, **kwargs))
56
- return wrapper
57
-
58
- # Apply async wrapper to commands
59
- init = click.command()(async_command(init))
60
- status = click.command()(async_command(status))
61
-
62
- main.add_command(init)
63
- main.add_command(status)
64
-
65
- if __name__ == '__main__':
66
- main()
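
Reviewer note: the removed module declared `async def` callbacks as Click commands and then re-wrapped the already-constructed Command objects, so the coroutines were never actually awaited when invoked. The replacement keeps callbacks synchronous and drives the coroutine explicitly, as in this minimal sketch of the pattern:

```python
import asyncio
import click

@click.command()
def init():
    async def _init():
        ...  # await ThreadStore.create(...), etc.
    asyncio.run(_init())  # Click calls the sync callback; asyncio runs the coroutine
```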