slide-narrator 1.0.0-py3-none-any.whl → 1.0.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of slide-narrator might be problematic.
- narrator/__init__.py +1 -1
- narrator/database/cli.py +198 -42
- narrator/database/storage_backend.py +74 -34
- narrator/storage/file_store.py +1 -7
- narrator/utils/logging.py +18 -24
- {slide_narrator-1.0.0.dist-info → slide_narrator-1.0.2.dist-info}/METADATA +67 -14
- {slide_narrator-1.0.0.dist-info → slide_narrator-1.0.2.dist-info}/RECORD +10 -10
- slide_narrator-1.0.2.dist-info/entry_points.txt +2 -0
- slide_narrator-1.0.0.dist-info/entry_points.txt +0 -2
- {slide_narrator-1.0.0.dist-info → slide_narrator-1.0.2.dist-info}/WHEEL +0 -0
- {slide_narrator-1.0.0.dist-info → slide_narrator-1.0.2.dist-info}/licenses/LICENSE +0 -0
narrator/__init__.py
CHANGED
narrator/database/cli.py
CHANGED
@@ -1,6 +1,12 @@
 """Database CLI for Tyler Stores"""
 import asyncio
+import os
 import click
+import functools
+import subprocess
+import tempfile
+import time
+from pathlib import Path
 from .thread_store import ThreadStore
 from ..utils.logging import get_logger
 
@@ -8,59 +14,209 @@ logger = get_logger(__name__)
 
 @click.group()
 def main():
-    """
+    """Narrator CLI - Database management commands"""
     pass
 
-@
+@main.command()
 @click.option('--database-url', help='Database URL for initialization')
-
+def init(database_url):
     """Initialize database tables"""
-
-
-
-
-
-
-
-
-
-
-
-
-
+    async def _init():
+        try:
+            # Use provided URL or check environment variable
+            url = database_url or os.environ.get('NARRATOR_DATABASE_URL')
+
+            if url:
+                store = await ThreadStore.create(url)
+            else:
+                # Use in-memory storage
+                store = await ThreadStore.create()
+
+            logger.info("Database initialized successfully")
+            click.echo("Database initialized successfully")
+        except Exception as e:
+            logger.error(f"Failed to initialize database: {e}")
+            click.echo(f"Error: Failed to initialize database: {e}")
+            raise click.Abort()
+
+    asyncio.run(_init())
 
-@
+@main.command()
 @click.option('--database-url', help='Database URL')
-
+def status(database_url):
     """Check database status"""
-
-
-
-
-
+    async def _status():
+        try:
+            # Use provided URL or check environment variable
+            url = database_url or os.environ.get('NARRATOR_DATABASE_URL')
+
+            if url:
+                store = await ThreadStore.create(url)
+            else:
+                store = await ThreadStore.create()
+
+            # Get some basic stats
+            threads = await store.list_recent(limit=5)
+            click.echo(f"Database connection: OK")
+            click.echo(f"Recent threads count: {len(threads)}")
+
+        except Exception as e:
+            logger.error(f"Database status check failed: {e}")
+            click.echo(f"Error: Database status check failed: {e}")
+            raise click.Abort()
+
+    asyncio.run(_status())
+
+@main.command()
+@click.option('--port', help='Port to expose PostgreSQL on (default: 5432 or NARRATOR_DB_PORT)')
+@click.option('--detach/--no-detach', default=True, help='Run container in background (default: True)')
+def docker_start(port, detach):
+    """Start a PostgreSQL container for Narrator"""
+    # Use environment variables with defaults matching docker-compose.yml
+    db_name = os.environ.get('NARRATOR_DB_NAME', 'narrator')
+    db_user = os.environ.get('NARRATOR_DB_USER', 'narrator')
+    db_password = os.environ.get('NARRATOR_DB_PASSWORD', 'narrator_dev')
+    db_port = port or os.environ.get('NARRATOR_DB_PORT', '5432')
+
+    docker_compose_content = f"""services:
+  postgres:
+    image: postgres:16
+    container_name: narrator-postgres
+    environment:
+      POSTGRES_DB: {db_name}
+      POSTGRES_USER: {db_user}
+      POSTGRES_PASSWORD: {db_password}
+    ports:
+      - "{db_port}:5432"
+    volumes:
+      - narrator_postgres_data:/var/lib/postgresql/data
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U {db_user}"]
+      interval: 5s
+      timeout: 5s
+      retries: 5
+
+volumes:
+  narrator_postgres_data:
+"""
+
+    # Create a temporary directory for docker-compose.yml
+    with tempfile.TemporaryDirectory() as tmpdir:
+        compose_file = Path(tmpdir) / "docker-compose.yml"
+        compose_file.write_text(docker_compose_content)
 
-    #
-
-
-
+        # Check if docker is available
+        try:
+            subprocess.run(["docker", "--version"], capture_output=True, check=True)
+        except (subprocess.CalledProcessError, FileNotFoundError):
+            click.echo("❌ Docker is not installed or not available in PATH")
+            raise click.Abort()
 
-
-
-
-
-
-
-
-
-
-
+        # Check if docker-compose or docker compose is available
+        compose_cmd = None
+        try:
+            subprocess.run(["docker", "compose", "version"], capture_output=True, check=True)
+            compose_cmd = ["docker", "compose"]
+        except (subprocess.CalledProcessError, FileNotFoundError):
+            try:
+                subprocess.run(["docker-compose", "version"], capture_output=True, check=True)
+                compose_cmd = ["docker-compose"]
+            except (subprocess.CalledProcessError, FileNotFoundError):
+                click.echo("❌ Docker Compose is not installed")
+                raise click.Abort()
+
+        # Start the container
+        click.echo("📦 Starting PostgreSQL container...")
+        cmd = compose_cmd + ["up"]
+        if detach:
+            cmd.append("-d")
+
+        result = subprocess.run(cmd, cwd=tmpdir)
+
+        if result.returncode != 0:
+            click.echo("❌ Failed to start PostgreSQL container")
+            raise click.Abort()
+
+        if detach:
+            # Wait for PostgreSQL to be ready
+            click.echo("⏳ Waiting for PostgreSQL to be ready...")
+            for i in range(30):
+                result = subprocess.run(
+                    ["docker", "exec", "narrator-postgres", "pg_isready", "-U", db_user],
+                    capture_output=True
+                )
+                if result.returncode == 0:
+                    click.echo("✅ PostgreSQL is ready!")
+                    click.echo(f"\n🎉 Database available at:")
+                    click.echo(f"   postgresql+asyncpg://{db_user}:{db_password}@localhost:{db_port}/{db_name}")
+                    return
+                time.sleep(1)
+
+            click.echo("❌ PostgreSQL failed to start after 30 seconds")
+            raise click.Abort()
 
-
-
-
+@main.command()
+@click.option('--remove-volumes', is_flag=True, help='Remove data volumes (destroys all data)')
+def docker_stop(remove_volumes):
+    """Stop the PostgreSQL container"""
+    # Check if docker is available
+    try:
+        subprocess.run(["docker", "--version"], capture_output=True, check=True)
+    except (subprocess.CalledProcessError, FileNotFoundError):
+        click.echo("❌ Docker is not installed or not available in PATH")
+        raise click.Abort()
+
+    # Check if container exists
+    result = subprocess.run(
+        ["docker", "ps", "-a", "--format", "{{.Names}}"],
+        capture_output=True,
+        text=True
+    )
+
+    if "narrator-postgres" not in result.stdout:
+        click.echo("ℹ️ No Narrator PostgreSQL container found")
+        return
+
+    click.echo("🛑 Stopping PostgreSQL container...")
+
+    # Stop the container
+    subprocess.run(["docker", "stop", "narrator-postgres"], check=False)
+    subprocess.run(["docker", "rm", "narrator-postgres"], check=False)
+
+    if remove_volumes:
+        click.echo("🗑️ Removing data volume...")
+        subprocess.run(["docker", "volume", "rm", "narrator_postgres_data"], check=False)
+        click.echo("✅ Container and data removed")
+    else:
+        click.echo("✅ Container stopped (data preserved)")
 
-main.
-
+@main.command()
+@click.option('--port', help='Port to expose PostgreSQL on (default: 5432 or NARRATOR_DB_PORT)')
+def docker_setup(port):
+    """One-command Docker setup: start PostgreSQL and initialize tables"""
+    # Start PostgreSQL
+    ctx = click.get_current_context()
+    ctx.invoke(docker_start, port=port, detach=True)
+
+    # Get database configuration from environment or defaults
+    db_name = os.environ.get('NARRATOR_DB_NAME', 'narrator')
+    db_user = os.environ.get('NARRATOR_DB_USER', 'narrator')
+    db_password = os.environ.get('NARRATOR_DB_PASSWORD', 'narrator_dev')
+    db_port = port or os.environ.get('NARRATOR_DB_PORT', '5432')
+
+    # Set up database URL
+    database_url = f"postgresql+asyncpg://{db_user}:{db_password}@localhost:{db_port}/{db_name}"
+    os.environ['NARRATOR_DATABASE_URL'] = database_url
+
+    # Initialize tables
+    click.echo("\n🔧 Initializing database tables...")
+    ctx.invoke(init, database_url=database_url)
+
+    click.echo("\n🎉 Setup complete! Your database is ready.")
+    click.echo("\nTo use in your code:")
+    click.echo(f'export NARRATOR_DATABASE_URL="{database_url}"')
+    click.echo("\nTo stop the container: narrator docker-stop")
+    click.echo("To remove all data: narrator docker-stop --remove-volumes")
 
 if __name__ == '__main__':
     main()
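Each new command above follows the same shape: a synchronous click callback that defines an inner coroutine and drives it with `asyncio.run()`. A minimal sketch of that pattern, using a hypothetical `ping` command rather than anything shipped in the wheel:

```python
# Sketch only: `ping` is a made-up command illustrating the sync-wrapper-around-async
# pattern used by the real `init` and `status` commands.
import asyncio

import click


@click.group()
def cli():
    """Example command group (the package's real group is `main`)."""
    pass


@cli.command()
@click.option('--database-url', help='Database URL')
def ping(database_url):
    """Hypothetical command: pretend to connect and report."""
    async def _ping():
        # The real commands await ThreadStore.create(url) here.
        await asyncio.sleep(0)
        click.echo(f"Would connect to: {database_url or 'in-memory'}")

    asyncio.run(_ping())


if __name__ == '__main__':
    cli()
```

The real `init` and `status` bodies differ only in what they await inside the inner coroutine.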
narrator/database/storage_backend.py
CHANGED

@@ -1,6 +1,7 @@
 """Storage backend implementations for ThreadStore."""
 from abc import ABC, abstractmethod
 from typing import List, Optional, Dict, Any, Union
+import re
 from datetime import datetime, UTC
 import json
 import os
@@ -20,6 +21,12 @@ from .models import Base, ThreadRecord, MessageRecord
 
 logger = get_logger(__name__)
 
+def _sanitize_key(component: str) -> str:
+    """Allow only alphanumeric and underscore for JSON path keys to avoid SQL injection."""
+    if not re.fullmatch(r"[A-Za-z0-9_]+", component):
+        raise ValueError(f"Invalid key component: {component}")
+    return component
+
 class StorageBackend(ABC):
     """Abstract base class for thread storage backends."""
 
@@ -149,28 +156,22 @@ class MemoryBackend(StorageBackend):
         Returns:
             List of messages matching the criteria (possibly empty)
         """
+        matches: List[Message] = []
         # Traverse all threads and messages
         for thread in self._threads.values():
             for message in thread.messages:
-
-                current = message.model_dump(mode="python")
-
+                current: Any = message.model_dump(mode="python")
                 # Navigate the nested structure
-                parts = path.split('.')
+                parts = [p for p in path.split('.') if p]
                 for part in parts:
                     if isinstance(current, dict) and part in current:
                         current = current[part]
                     else:
                         current = None
                         break
-
-                # Check if we found a match
                 if current == value:
-
-
-                    return [message]
-
-        return []
+                    matches.append(message)
+        return matches
 
 class SQLBackend(StorageBackend):
     """SQL storage backend supporting both SQLite and PostgreSQL with proper connection pooling."""
@@ -437,7 +438,17 @@ class SQLBackend(StorageBackend):
             for key, value in attributes.items():
                 if self.database_url.startswith('sqlite'):
                     # Use SQLite json_extract
-
+                    safe_key = _sanitize_key(key)
+                    if value is None:
+                        query = query.where(text(f"json_extract(attributes, '$.{safe_key}') IS NULL"))
+                    elif isinstance(value, bool):
+                        # SQLite stores booleans as 1/0
+                        num_val = 1 if value else 0
+                        query = query.where(text(f"json_extract(attributes, '$.{safe_key}') = {num_val}"))
+                    else:
+                        query = query.where(
+                            text(f"json_extract(attributes, '$.{safe_key}') = :value").bindparams(value=str(value))
+                        )
                 else:
                     # Use PostgreSQL JSONB operators via text() for direct SQL control
                     logger.info(f"Searching for attribute[{key}] = {value} (type: {type(value)})")
@@ -445,7 +456,8 @@ class SQLBackend(StorageBackend):
                     # Handle different value types appropriately
                     if value is None:
                         # Check for null/None values
-
+                        safe_key = _sanitize_key(key)
+                        query = query.where(text(f"attributes->>'{safe_key}' IS NULL"))
                     else:
                         # Convert value to string for text comparison
                         str_value = str(value)
@@ -454,10 +466,11 @@ class SQLBackend(StorageBackend):
                             str_value = str(value).lower()
 
                         # Use PostgreSQL's JSONB operators for direct string comparison
-
+                        safe_key = _sanitize_key(key)
+                        param_name = f"attr_{safe_key}"
                         bp = bindparam(param_name, str_value)
                         query = query.where(
-                            text(f"attributes->>'{
+                            text(f"attributes->>'{safe_key}' = :{param_name}").bindparams(bp)
                         )
 
                         # Log the final query for debugging
@@ -481,28 +494,38 @@ class SQLBackend(StorageBackend):
 
             if self.database_url.startswith('sqlite'):
                 # Use SQLite json_extract for platform name
-
+                safe_platform = _sanitize_key(platform_name)
+                query = query.where(text(f"json_extract(platforms, '$.{safe_platform}') IS NOT NULL"))
                 # Add property conditions
                 for key, value in properties.items():
                     # Convert value to string for text comparison
-
-
-
-
-
-                    .
-
+                    safe_key = _sanitize_key(key)
+                    if value is None:
+                        query = query.where(text(f"json_extract(platforms, '$.{safe_platform}.{safe_key}') IS NULL"))
+                    elif isinstance(value, bool):
+                        num_val = 1 if value else 0
+                        query = query.where(text(f"json_extract(platforms, '$.{safe_platform}.{safe_key}') = {num_val}"))
+                    else:
+                        str_value = str(value)
+                        param_name = f"value_{safe_platform}_{safe_key}"  # Ensure unique param name
+                        bp = bindparam(param_name, str_value)
+                        query = query.where(
+                            text(f"json_extract(platforms, '$.{safe_platform}.{safe_key}') = :{param_name}")
+                            .bindparams(bp)
+                        )
             else:
                 # Use PostgreSQL JSONB operators for platform checks
-
+                safe_platform = _sanitize_key(platform_name)
+                query = query.where(text(f"platforms ? '{safe_platform}'"))
 
                 # Add property conditions with text() for proper PostgreSQL JSONB syntax
                 for key, value in properties.items():
                     str_value = str(value)
-
+                    safe_key = _sanitize_key(key)
+                    param_name = f"value_{safe_platform}_{safe_key}"
                     bp = bindparam(param_name, str_value)
                     query = query.where(
-                        text(f"platforms->'{
+                        text(f"platforms->'{safe_platform}'->>'{safe_key}' = :{param_name}")
                         .bindparams(bp)
                     )
 
@@ -540,16 +563,33 @@ class SQLBackend(StorageBackend):
         try:
             query = select(MessageRecord)
 
+            # Normalize and sanitize path parts
+            parts = [p for p in path.split('.') if p]
+            parts = [_sanitize_key(p) for p in parts]
+            if not parts:
+                return []
+            # Support paths prefixed with 'source.' by stripping the leading component
+            if parts and parts[0] == 'source':
+                parts = parts[1:]
+                if not parts:
+                    return []
             if self.database_url.startswith('sqlite'):
-                # Use SQLite json_extract
-                json_path = '$.' +
-                query = query.where(
+                # Use SQLite json_extract with a proper JSON path: $.a.b.c (safe due to sanitized parts)
+                json_path = '$.' + '.'.join(parts)
+                query = query.where(
+                    text(f"json_extract(source, '{json_path}') = :value").bindparams(value=str(value))
+                )
             else:
-                # Use PostgreSQL JSONB operators
-
-
-
+                # Use PostgreSQL JSONB operators: source->'a'->'b'->>'c' (last part text)
+                if len(parts) == 1:
+                    pg_expr = f"source->>'{parts[0]}'"
+                else:
+                    head = parts[:-1]
+                    tail = parts[-1]
+                    pg_expr = "source" + ''.join([f"->'{h}'" for h in head]) + f"->>'{tail}'"
+                query = query.where(
+                    text(f"{pg_expr} = :value").bindparams(value=str(value))
+                )
 
             result = await session.execute(query)
             return result.scalars().all()
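The common thread in the storage-backend changes is that every key interpolated into a `json_extract` or JSONB path now passes through `_sanitize_key`, while values travel as bound parameters. A standalone sketch of the helper's behaviour, re-declared here for illustration rather than imported from the package:

```python
# Mirrors the helper added in the diff; shown standalone so it can be run directly.
import re


def _sanitize_key(component: str) -> str:
    """Allow only alphanumeric characters and underscores in JSON path keys."""
    if not re.fullmatch(r"[A-Za-z0-9_]+", component):
        raise ValueError(f"Invalid key component: {component}")
    return component


print(_sanitize_key("customer_id"))      # passes: customer_id
try:
    _sanitize_key("id') OR 1=1 --")      # rejected: quotes and spaces are not allowed
except ValueError as exc:
    print(exc)
```

Anything that could terminate the quoted path fragment is rejected before interpolation, which is why the f-strings over `safe_key` and `safe_platform` in the queries above are safe.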
narrator/storage/file_store.py
CHANGED
@@ -494,13 +494,7 @@ class FileStore:
             files.append(file_id)
         return files
 
-
-        """Handle data URLs"""
-        if self.content.startswith('data:'):
-            # Handle data URLs
-            header, encoded = self.content.split(",", 1)
-            return base64.b64decode(encoded)
-        return content
+    # Note: data URL handling is performed at the Attachment layer where the content type is known.
 
     @classmethod
     def get_base_path(cls) -> str:
narrator/utils/logging.py
CHANGED
@@ -3,36 +3,30 @@ import os
 import logging
 from typing import Optional
 
+class _NarratorNullHandler(logging.Handler):
+    def emit(self, record):
+        pass
+
 _is_configured = False
 
 def _ensure_logging_configured():
-    """
+    """Attach a NullHandler and optionally set level based on env without overriding app config."""
     global _is_configured
    if _is_configured:
         return
 
-
-
-
-
-
-
-
-
-
-
-
-
-        level=log_level,
-        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
-        datefmt='%H:%M:%S',
-        force=True  # Ensure we override any existing configuration
-    )
-
-    # Get the root logger and set its level
-    root_logger = logging.getLogger()
-    root_logger.setLevel(log_level)
-
+    logger = logging.getLogger('narrator')
+    # Avoid duplicate handlers
+    if not any(isinstance(h, _NarratorNullHandler) for h in logger.handlers):
+        logger.addHandler(_NarratorNullHandler())
+
+    # Respect env level but do not call basicConfig or force reconfigure
+    log_level_str = os.getenv('NARRATOR_LOG_LEVEL', os.getenv('LOG_LEVEL', '')).upper()
+    if log_level_str:
+        level = getattr(logging, log_level_str, None)
+        if isinstance(level, int):
+            logger.setLevel(level)
+
     _is_configured = True
 
 def get_logger(name: Optional[str] = None) -> logging.Logger:
@@ -55,4 +49,4 @@ def get_logger(name: Optional[str] = None) -> logging.Logger:
         logger.debug("Debug message")  # Will respect NARRATOR_LOG_LEVEL from .env
     """
     _ensure_logging_configured()
-    return logging.getLogger(name or '
+    return logging.getLogger(name or 'narrator.unknown')
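With the library now attaching only a no-op handler to the `narrator` logger, log output is something the application opts into. A sketch of the standard stdlib pattern a consuming application might use (this configuration is not part of the package):

```python
# Application-side logging setup; assumes only the stdlib and the 'narrator' logger name.
import logging

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    datefmt='%H:%M:%S',
)

# Opt narrator into more verbose output without touching other libraries' loggers.
logging.getLogger('narrator').setLevel(logging.DEBUG)
```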
{slide_narrator-1.0.0.dist-info → slide_narrator-1.0.2.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: slide-narrator
-Version: 1.0.
+Version: 1.0.2
 Summary: Thread and file storage components for conversational AI - the companion to Tyler AI framework
 Project-URL: Homepage, https://github.com/adamwdraper/slide
 Project-URL: Repository, https://github.com/adamwdraper/slide
@@ -15,6 +15,7 @@ Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.11
 Requires-Dist: aiosqlite>=0.21.0
 Requires-Dist: alembic>=1.14.1
@@ -25,7 +26,6 @@ Requires-Dist: pydantic>=2.10.4
 Requires-Dist: pypdf>=5.3.0
 Requires-Dist: python-magic>=0.4.0
 Requires-Dist: sqlalchemy>=2.0.36
-Requires-Dist: uuid-utils>=0.10.0
 Provides-Extra: dev
 Requires-Dist: coverage>=7.6.10; extra == 'dev'
 Requires-Dist: pytest-asyncio>=0.25.2; extra == 'dev'
@@ -74,19 +74,72 @@ pip install slide-narrator
 
 ## Setup
 
+### Docker Setup (Recommended for Development)
+
+For local development with PostgreSQL, Narrator includes built-in Docker commands:
+
+```bash
+# One-command setup: starts PostgreSQL and initializes tables
+uv run narrator docker-setup
+
+# This will:
+# 1. Start a PostgreSQL container
+# 2. Wait for it to be ready
+# 3. Initialize the database tables
+# 4. Show you the connection string
+
+# The database will be available at:
+# postgresql+asyncpg://narrator:narrator_dev@localhost:5432/narrator
+```
+
+To manage the Docker container:
+
+```bash
+# Stop container (preserves data)
+uv run narrator docker-stop
+
+# Stop and remove all data
+uv run narrator docker-stop --remove-volumes
+
+# Start container again
+uv run narrator docker-start
+
+# Check database status
+uv run narrator status
+```
+
+For custom configurations, the Docker commands respect environment variables:
+
+```bash
+# Use a different port
+uv run narrator docker-setup --port 5433
+
+# Or set environment variables (matching docker-compose.yml)
+export NARRATOR_DB_NAME=mydb
+export NARRATOR_DB_USER=myuser
+export NARRATOR_DB_PASSWORD=mypassword
+export NARRATOR_DB_PORT=5433
+
+# Then run docker-setup
+uv run narrator docker-setup
+
+# This will create:
+# postgresql+asyncpg://myuser:mypassword@localhost:5433/mydb
+```
+
 ### Database Setup
 
 For production use with PostgreSQL or SQLite persistence, you'll need to initialize the database tables:
 
 ```bash
 # Initialize database tables (PostgreSQL)
-narrator
+uv run narrator init --database-url "postgresql+asyncpg://user:password@localhost/dbname"
 
 # Initialize database tables (SQLite)
-narrator
+uv run narrator init --database-url "sqlite+aiosqlite:///path/to/your/database.db"
 
 # Check database status
-narrator
+uv run narrator status --database-url "postgresql+asyncpg://user:password@localhost/dbname"
 ```
 
 You can also use environment variables instead of passing the database URL:
@@ -96,8 +149,8 @@ You can also use environment variables instead of passing the database URL:
 export NARRATOR_DATABASE_URL="postgresql+asyncpg://user:password@localhost/dbname"
 
 # Then run without --database-url flag
-narrator
-narrator
+uv run narrator init
+uv run narrator status
 ```
 
 ### Environment Variables
@@ -382,22 +435,22 @@ The Narrator includes a CLI tool for database management:
 
 ```bash
 # Initialize database tables
-narrator
+uv run narrator init --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
 
 # Initialize using environment variable
 export NARRATOR_DATABASE_URL="postgresql+asyncpg://user:pass@localhost/dbname"
-narrator
+uv run narrator init
 
 # Check database status
-narrator
+uv run narrator status --database-url "postgresql+asyncpg://user:pass@localhost/dbname"
 
 # Check status using environment variable
-narrator
+uv run narrator status
 ```
 
 Available commands:
-- `narrator
-- `narrator
+- `uv run narrator init` - Initialize database tables
+- `uv run narrator status` - Check database connection and basic statistics
 
 ## Key Design Principles
 
@@ -480,7 +533,7 @@ uv run pytest tests/ -v
 ### Test Requirements
 
 The test suite requires:
-- Python 3.
+- Python 3.13+
 - pytest with async support
 - Test coverage reporting
 - System dependencies (libmagic for file type detection)
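For context, application code consuming the connection string printed by `docker-setup` would look roughly like the sketch below. The import path is inferred from the package layout in RECORD, and `ThreadStore.create()` / `list_recent()` are the calls visible in the CLI diff; treat the rest as illustrative assumptions:

```python
# Rough sketch of application-side usage; import path and environment variable
# names are taken from the diff, everything else is illustrative.
import asyncio
import os

from narrator.database.thread_store import ThreadStore


async def main():
    # e.g. postgresql+asyncpg://narrator:narrator_dev@localhost:5432/narrator
    url = os.environ.get('NARRATOR_DATABASE_URL')
    if url:
        store = await ThreadStore.create(url)
    else:
        store = await ThreadStore.create()  # falls back to in-memory storage

    recent = await store.list_recent(limit=5)
    print(f"Recent threads: {len(recent)}")


asyncio.run(main())
```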
{slide_narrator-1.0.0.dist-info → slide_narrator-1.0.2.dist-info}/RECORD
CHANGED

@@ -1,8 +1,8 @@
-narrator/__init__.py,sha256=
+narrator/__init__.py,sha256=xNe6Xc5aX4gZbtgxEq8k93iUfr62z2dIFsrVScEDIAk,403
 narrator/database/__init__.py,sha256=UngOnFqImCeJiMZlMasm72mC4-UnJDDvfu1MNQLkRA8,189
-narrator/database/cli.py,sha256=
+narrator/database/cli.py,sha256=QvET17X5kLZ7GiOTw0b80-u4FuI-tOTu4SjAqCBkiSs,8355
 narrator/database/models.py,sha256=wsG_5GrPo41hAcojjZTZmSx6bijea-Skan-DwzHs8os,2607
-narrator/database/storage_backend.py,sha256=
+narrator/database/storage_backend.py,sha256=y0bPNNHpIYInLTXwncY7LMU7B6zWQE3zcPanmQXa1XM,27481
 narrator/database/thread_store.py,sha256=vMIPDdwuSpTyPogEUmxGcILxM_r1wxoQBUOn8XJpdqM,11301
 narrator/database/migrations/__init__.py,sha256=IqoSL8eCcbcOtn96u2_TTrNG0KN1Jn1yreDZEO4RsnM,173
 narrator/models/__init__.py,sha256=J8Rsv2lmfGR5QmUjoAPEFTSQt5TGtyrBynnp17HdZnU,179
@@ -10,11 +10,11 @@ narrator/models/attachment.py,sha256=6fZnGla_Ahgc4Kro2bHBTWoF_Kr-mUBHzONizVH73oc
 narrator/models/message.py,sha256=-e0WzT5cJMrh7dDQgofHkHz0-z2KF4fHhe8nk9iG_OQ,21144
 narrator/models/thread.py,sha256=4HKnCW8MkF52vYA6FQga1awxMA7OPjxOZL4QBcXpYOo,19218
 narrator/storage/__init__.py,sha256=K4cxGITSQoQiw32QOWZsCBm11fwDTbsyzHGeAqcL6yY,101
-narrator/storage/file_store.py,sha256
+narrator/storage/file_store.py,sha256=m2btUQcbqpHbWm-htPe1_zwcGRmFXatmS_m9rB9ac2U,19858
 narrator/utils/__init__.py,sha256=P4BhLvBJbBvb8qha2tTZPlYbjCRXth_K97f4vNc77UI,109
-narrator/utils/logging.py,sha256=
-slide_narrator-1.0.
-slide_narrator-1.0.
-slide_narrator-1.0.
-slide_narrator-1.0.
-slide_narrator-1.0.
+narrator/utils/logging.py,sha256=Hm6D4VX03e28UCkNS1pCOXnYQKHQ2nz_PvZX8h-wLgg,1807
+slide_narrator-1.0.2.dist-info/METADATA,sha256=MBGv40hmJ_tUlCEWgjKogzQWEpef4u9G0gs5RjSIHjA,16514
+slide_narrator-1.0.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+slide_narrator-1.0.2.dist-info/entry_points.txt,sha256=5Oa53AERvPVdrEvsdWbY85xfzAGayOqq_P4KEmf1khA,56
+slide_narrator-1.0.2.dist-info/licenses/LICENSE,sha256=g6cGasroU9sqSOjThWg14w0BMlwZhgmOQQVTiu036ks,1068
+slide_narrator-1.0.2.dist-info/RECORD,,
{slide_narrator-1.0.0.dist-info → slide_narrator-1.0.2.dist-info}/WHEEL
File without changes

{slide_narrator-1.0.0.dist-info → slide_narrator-1.0.2.dist-info}/licenses/LICENSE
File without changes