slide-narrator 1.0.1-py3-none-any.whl → 1.0.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of slide-narrator might be problematic. See the package's registry page for more details.

narrator/__init__.py CHANGED
@@ -8,7 +8,7 @@ from .models.thread import Thread
8
8
  from .models.message import Message
9
9
  from .models.attachment import Attachment
10
10
 
11
- __version__ = "1.0.1"
11
+ __version__ = "1.0.2"
12
12
  __all__ = [
13
13
  "ThreadStore",
14
14
  "FileStore",
@@ -1,6 +1,7 @@
1
1
  """Storage backend implementations for ThreadStore."""
2
2
  from abc import ABC, abstractmethod
3
3
  from typing import List, Optional, Dict, Any, Union
4
+ import re
4
5
  from datetime import datetime, UTC
5
6
  import json
6
7
  import os
@@ -20,6 +21,12 @@ from .models import Base, ThreadRecord, MessageRecord
20
21
 
21
22
  logger = get_logger(__name__)
22
23
 
24
+ def _sanitize_key(component: str) -> str:
25
+ """Allow only alphanumeric and underscore for JSON path keys to avoid SQL injection."""
26
+ if not re.fullmatch(r"[A-Za-z0-9_]+", component):
27
+ raise ValueError(f"Invalid key component: {component}")
28
+ return component
29
+
23
30
  class StorageBackend(ABC):
24
31
  """Abstract base class for thread storage backends."""
25
32
 
@@ -149,28 +156,22 @@ class MemoryBackend(StorageBackend):
149
156
  Returns:
150
157
  List of messages matching the criteria (possibly empty)
151
158
  """
159
+ matches: List[Message] = []
152
160
  # Traverse all threads and messages
153
161
  for thread in self._threads.values():
154
162
  for message in thread.messages:
155
- # Use the path to navigate to the target attribute
156
- current = message.model_dump(mode="python")
157
-
163
+ current: Any = message.model_dump(mode="python")
158
164
  # Navigate the nested structure
159
- parts = path.split('.')
165
+ parts = [p for p in path.split('.') if p]
160
166
  for part in parts:
161
167
  if isinstance(current, dict) and part in current:
162
168
  current = current[part]
163
169
  else:
164
170
  current = None
165
171
  break
166
-
167
- # Check if we found a match
168
172
  if current == value:
169
- # For MemoryBackend, we can't return MessageRecord objects
170
- # Return a list containing the message data that the ThreadStore can handle
171
- return [message]
172
-
173
- return []
173
+ matches.append(message)
174
+ return matches
174
175
 
175
176
  class SQLBackend(StorageBackend):
176
177
  """SQL storage backend supporting both SQLite and PostgreSQL with proper connection pooling."""
@@ -437,7 +438,17 @@ class SQLBackend(StorageBackend):
437
438
  for key, value in attributes.items():
438
439
  if self.database_url.startswith('sqlite'):
439
440
  # Use SQLite json_extract
440
- query = query.where(text(f"json_extract(attributes, '$.{key}') = :value").bindparams(value=str(value)))
441
+ safe_key = _sanitize_key(key)
442
+ if value is None:
443
+ query = query.where(text(f"json_extract(attributes, '$.{safe_key}') IS NULL"))
444
+ elif isinstance(value, bool):
445
+ # SQLite stores booleans as 1/0
446
+ num_val = 1 if value else 0
447
+ query = query.where(text(f"json_extract(attributes, '$.{safe_key}') = {num_val}"))
448
+ else:
449
+ query = query.where(
450
+ text(f"json_extract(attributes, '$.{safe_key}') = :value").bindparams(value=str(value))
451
+ )
441
452
  else:
442
453
  # Use PostgreSQL JSONB operators via text() for direct SQL control
443
454
  logger.info(f"Searching for attribute[{key}] = {value} (type: {type(value)})")
@@ -445,7 +456,8 @@ class SQLBackend(StorageBackend):
445
456
  # Handle different value types appropriately
446
457
  if value is None:
447
458
  # Check for null/None values
448
- query = query.where(text(f"attributes->>'{key}' IS NULL"))
459
+ safe_key = _sanitize_key(key)
460
+ query = query.where(text(f"attributes->>'{safe_key}' IS NULL"))
449
461
  else:
450
462
  # Convert value to string for text comparison
451
463
  str_value = str(value)
@@ -454,10 +466,11 @@ class SQLBackend(StorageBackend):
454
466
  str_value = str(value).lower()
455
467
 
456
468
  # Use PostgreSQL's JSONB operators for direct string comparison
457
- param_name = f"attr_{key}"
469
+ safe_key = _sanitize_key(key)
470
+ param_name = f"attr_{safe_key}"
458
471
  bp = bindparam(param_name, str_value)
459
472
  query = query.where(
460
- text(f"attributes->>'{key}' = :{param_name}").bindparams(bp)
473
+ text(f"attributes->>'{safe_key}' = :{param_name}").bindparams(bp)
461
474
  )
462
475
 
463
476
  # Log the final query for debugging
@@ -481,28 +494,38 @@ class SQLBackend(StorageBackend):
481
494
 
482
495
  if self.database_url.startswith('sqlite'):
483
496
  # Use SQLite json_extract for platform name
484
- query = query.where(text(f"json_extract(platforms, '$.{platform_name}') IS NOT NULL"))
497
+ safe_platform = _sanitize_key(platform_name)
498
+ query = query.where(text(f"json_extract(platforms, '$.{safe_platform}') IS NOT NULL"))
485
499
  # Add property conditions
486
500
  for key, value in properties.items():
487
501
  # Convert value to string for text comparison
488
- str_value = str(value)
489
- param_name = f"value_{platform_name}_{key}" # Ensure unique param name
490
- bp = bindparam(param_name, str_value)
491
- query = query.where(
492
- text(f"json_extract(platforms, '$.{platform_name}.{key}') = :{param_name}")
493
- .bindparams(bp)
494
- )
502
+ safe_key = _sanitize_key(key)
503
+ if value is None:
504
+ query = query.where(text(f"json_extract(platforms, '$.{safe_platform}.{safe_key}') IS NULL"))
505
+ elif isinstance(value, bool):
506
+ num_val = 1 if value else 0
507
+ query = query.where(text(f"json_extract(platforms, '$.{safe_platform}.{safe_key}') = {num_val}"))
508
+ else:
509
+ str_value = str(value)
510
+ param_name = f"value_{safe_platform}_{safe_key}" # Ensure unique param name
511
+ bp = bindparam(param_name, str_value)
512
+ query = query.where(
513
+ text(f"json_extract(platforms, '$.{safe_platform}.{safe_key}') = :{param_name}")
514
+ .bindparams(bp)
515
+ )
495
516
  else:
496
517
  # Use PostgreSQL JSONB operators for platform checks
497
- query = query.where(text(f"platforms ? '{platform_name}'"))
518
+ safe_platform = _sanitize_key(platform_name)
519
+ query = query.where(text(f"platforms ? '{safe_platform}'"))
498
520
 
499
521
  # Add property conditions with text() for proper PostgreSQL JSONB syntax
500
522
  for key, value in properties.items():
501
523
  str_value = str(value)
502
- param_name = f"value_{platform_name}_{key}"
524
+ safe_key = _sanitize_key(key)
525
+ param_name = f"value_{safe_platform}_{safe_key}"
503
526
  bp = bindparam(param_name, str_value)
504
527
  query = query.where(
505
- text(f"platforms->'{platform_name}'->>'{key}' = :{param_name}")
528
+ text(f"platforms->'{safe_platform}'->>'{safe_key}' = :{param_name}")
506
529
  .bindparams(bp)
507
530
  )
508
531
 
@@ -540,16 +563,33 @@ class SQLBackend(StorageBackend):
540
563
  try:
541
564
  query = select(MessageRecord)
542
565
 
566
+ # Normalize and sanitize path parts
567
+ parts = [p for p in path.split('.') if p]
568
+ parts = [_sanitize_key(p) for p in parts]
569
+ if not parts:
570
+ return []
571
+ # Support paths prefixed with 'source.' by stripping the leading component
572
+ if parts and parts[0] == 'source':
573
+ parts = parts[1:]
574
+ if not parts:
575
+ return []
543
576
  if self.database_url.startswith('sqlite'):
544
- # Use SQLite json_extract
545
- json_path = '$.' + path.replace('.', '.')
546
- query = query.where(text(f"json_extract(source, '{json_path}') = :value").bindparams(value=str(value)))
577
+ # Use SQLite json_extract with a proper JSON path: $.a.b.c (safe due to sanitized parts)
578
+ json_path = '$.' + '.'.join(parts)
579
+ query = query.where(
580
+ text(f"json_extract(source, '{json_path}') = :value").bindparams(value=str(value))
581
+ )
547
582
  else:
548
- # Use PostgreSQL JSONB operators
549
- # Convert dot notation to PostgreSQL JSON path
550
- path_parts = path.split('.')
551
- json_path = '->'.join([f"'{part}'" for part in path_parts[:-1]]) + f"->>'{path_parts[-1]}'"
552
- query = query.where(text(f"source{json_path} = :value").bindparams(value=str(value)))
583
+ # Use PostgreSQL JSONB operators: source->'a'->'b'->>'c' (last part text)
584
+ if len(parts) == 1:
585
+ pg_expr = f"source->>'{parts[0]}'"
586
+ else:
587
+ head = parts[:-1]
588
+ tail = parts[-1]
589
+ pg_expr = "source" + ''.join([f"->'{h}'" for h in head]) + f"->>'{tail}'"
590
+ query = query.where(
591
+ text(f"{pg_expr} = :value").bindparams(value=str(value))
592
+ )
553
593
 
554
594
  result = await session.execute(query)
555
595
  return result.scalars().all()
@@ -494,13 +494,7 @@ class FileStore:
494
494
  files.append(file_id)
495
495
  return files
496
496
 
497
- def _handle_data_url(self, content: bytes) -> bytes:
498
- """Handle data URLs"""
499
- if self.content.startswith('data:'):
500
- # Handle data URLs
501
- header, encoded = self.content.split(",", 1)
502
- return base64.b64decode(encoded)
503
- return content
497
+ # Note: data URL handling is performed at the Attachment layer where the content type is known.
504
498
 
505
499
  @classmethod
506
500
  def get_base_path(cls) -> str:
narrator/utils/logging.py CHANGED
@@ -3,36 +3,30 @@ import os
3
3
  import logging
4
4
  from typing import Optional
5
5
 
6
+ class _NarratorNullHandler(logging.Handler):
7
+ def emit(self, record):
8
+ pass
9
+
6
10
  _is_configured = False
7
11
 
8
12
  def _ensure_logging_configured():
9
- """Internal function to configure logging if not already configured."""
13
+ """Attach a NullHandler and optionally set level based on env without overriding app config."""
10
14
  global _is_configured
11
15
  if _is_configured:
12
16
  return
13
17
 
14
- # Get log level from environment and convert to uppercase
15
- log_level_str = os.getenv('NARRATOR_LOG_LEVEL', os.getenv('LOG_LEVEL', 'INFO')).upper()
16
-
17
- # Convert string to logging level constant
18
- try:
19
- log_level = getattr(logging, log_level_str)
20
- except AttributeError:
21
- print(f"Invalid LOG_LEVEL: {log_level_str}. Defaulting to INFO.")
22
- log_level = logging.INFO
23
-
24
- # Configure the root logger with our format
25
- logging.basicConfig(
26
- level=log_level,
27
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
28
- datefmt='%H:%M:%S',
29
- force=True # Ensure we override any existing configuration
30
- )
31
-
32
- # Get the root logger and set its level
33
- root_logger = logging.getLogger()
34
- root_logger.setLevel(log_level)
35
-
18
+ logger = logging.getLogger('narrator')
19
+ # Avoid duplicate handlers
20
+ if not any(isinstance(h, _NarratorNullHandler) for h in logger.handlers):
21
+ logger.addHandler(_NarratorNullHandler())
22
+
23
+ # Respect env level but do not call basicConfig or force reconfigure
24
+ log_level_str = os.getenv('NARRATOR_LOG_LEVEL', os.getenv('LOG_LEVEL', '')).upper()
25
+ if log_level_str:
26
+ level = getattr(logging, log_level_str, None)
27
+ if isinstance(level, int):
28
+ logger.setLevel(level)
29
+
36
30
  _is_configured = True
37
31
 
38
32
  def get_logger(name: Optional[str] = None) -> logging.Logger:
@@ -55,4 +49,4 @@ def get_logger(name: Optional[str] = None) -> logging.Logger:
55
49
  logger.debug("Debug message") # Will respect NARRATOR_LOG_LEVEL from .env
56
50
  """
57
51
  _ensure_logging_configured()
58
- return logging.getLogger(name or '__name__')
52
+ return logging.getLogger(name or 'narrator.unknown')
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: slide-narrator
3
- Version: 1.0.1
3
+ Version: 1.0.2
4
4
  Summary: Thread and file storage components for conversational AI - the companion to Tyler AI framework
5
5
  Project-URL: Homepage, https://github.com/adamwdraper/slide
6
6
  Project-URL: Repository, https://github.com/adamwdraper/slide
@@ -15,6 +15,7 @@ Classifier: Operating System :: OS Independent
15
15
  Classifier: Programming Language :: Python :: 3
16
16
  Classifier: Programming Language :: Python :: 3.11
17
17
  Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
18
19
  Requires-Python: >=3.11
19
20
  Requires-Dist: aiosqlite>=0.21.0
20
21
  Requires-Dist: alembic>=1.14.1
@@ -25,7 +26,6 @@ Requires-Dist: pydantic>=2.10.4
25
26
  Requires-Dist: pypdf>=5.3.0
26
27
  Requires-Dist: python-magic>=0.4.0
27
28
  Requires-Dist: sqlalchemy>=2.0.36
28
- Requires-Dist: uuid-utils>=0.10.0
29
29
  Provides-Extra: dev
30
30
  Requires-Dist: coverage>=7.6.10; extra == 'dev'
31
31
  Requires-Dist: pytest-asyncio>=0.25.2; extra == 'dev'
@@ -533,7 +533,7 @@ uv run pytest tests/ -v
533
533
  ### Test Requirements
534
534
 
535
535
  The test suite requires:
536
- - Python 3.12+
536
+ - Python 3.13+
537
537
  - pytest with async support
538
538
  - Test coverage reporting
539
539
  - System dependencies (libmagic for file type detection)
@@ -1,8 +1,8 @@
1
- narrator/__init__.py,sha256=ZitqLYs56vOVMMvnLwbE4AFC_ZK31cC97BVH8Ei-QBs,403
1
+ narrator/__init__.py,sha256=xNe6Xc5aX4gZbtgxEq8k93iUfr62z2dIFsrVScEDIAk,403
2
2
  narrator/database/__init__.py,sha256=UngOnFqImCeJiMZlMasm72mC4-UnJDDvfu1MNQLkRA8,189
3
3
  narrator/database/cli.py,sha256=QvET17X5kLZ7GiOTw0b80-u4FuI-tOTu4SjAqCBkiSs,8355
4
4
  narrator/database/models.py,sha256=wsG_5GrPo41hAcojjZTZmSx6bijea-Skan-DwzHs8os,2607
5
- narrator/database/storage_backend.py,sha256=UeMgxW8h3ZNWORZNH_f-jZuHNjHpREBaOOAFPeEPlvA,25444
5
+ narrator/database/storage_backend.py,sha256=y0bPNNHpIYInLTXwncY7LMU7B6zWQE3zcPanmQXa1XM,27481
6
6
  narrator/database/thread_store.py,sha256=vMIPDdwuSpTyPogEUmxGcILxM_r1wxoQBUOn8XJpdqM,11301
7
7
  narrator/database/migrations/__init__.py,sha256=IqoSL8eCcbcOtn96u2_TTrNG0KN1Jn1yreDZEO4RsnM,173
8
8
  narrator/models/__init__.py,sha256=J8Rsv2lmfGR5QmUjoAPEFTSQt5TGtyrBynnp17HdZnU,179
@@ -10,11 +10,11 @@ narrator/models/attachment.py,sha256=6fZnGla_Ahgc4Kro2bHBTWoF_Kr-mUBHzONizVH73oc
10
10
  narrator/models/message.py,sha256=-e0WzT5cJMrh7dDQgofHkHz0-z2KF4fHhe8nk9iG_OQ,21144
11
11
  narrator/models/thread.py,sha256=4HKnCW8MkF52vYA6FQga1awxMA7OPjxOZL4QBcXpYOo,19218
12
12
  narrator/storage/__init__.py,sha256=K4cxGITSQoQiw32QOWZsCBm11fwDTbsyzHGeAqcL6yY,101
13
- narrator/storage/file_store.py,sha256=-k1ZYzKYioCMiP7KfWuuCmCeAzDqRv38ndpuM75yISY,20047
13
+ narrator/storage/file_store.py,sha256=m2btUQcbqpHbWm-htPe1_zwcGRmFXatmS_m9rB9ac2U,19858
14
14
  narrator/utils/__init__.py,sha256=P4BhLvBJbBvb8qha2tTZPlYbjCRXth_K97f4vNc77UI,109
15
- narrator/utils/logging.py,sha256=K9EWI7lP4CNQpPwggiqzMex7oF6oyL3wIVLik2iuXd4,1983
16
- slide_narrator-1.0.1.dist-info/METADATA,sha256=ie1ZcCU3YGEjsDrYmBqOFbCoSuTzCGtTTUNKsOiUKP4,16497
17
- slide_narrator-1.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
18
- slide_narrator-1.0.1.dist-info/entry_points.txt,sha256=5Oa53AERvPVdrEvsdWbY85xfzAGayOqq_P4KEmf1khA,56
19
- slide_narrator-1.0.1.dist-info/licenses/LICENSE,sha256=g6cGasroU9sqSOjThWg14w0BMlwZhgmOQQVTiu036ks,1068
20
- slide_narrator-1.0.1.dist-info/RECORD,,
15
+ narrator/utils/logging.py,sha256=Hm6D4VX03e28UCkNS1pCOXnYQKHQ2nz_PvZX8h-wLgg,1807
16
+ slide_narrator-1.0.2.dist-info/METADATA,sha256=MBGv40hmJ_tUlCEWgjKogzQWEpef4u9G0gs5RjSIHjA,16514
17
+ slide_narrator-1.0.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
18
+ slide_narrator-1.0.2.dist-info/entry_points.txt,sha256=5Oa53AERvPVdrEvsdWbY85xfzAGayOqq_P4KEmf1khA,56
19
+ slide_narrator-1.0.2.dist-info/licenses/LICENSE,sha256=g6cGasroU9sqSOjThWg14w0BMlwZhgmOQQVTiu036ks,1068
20
+ slide_narrator-1.0.2.dist-info/RECORD,,