tokenator 0.1.3__tar.gz → 0.1.5__tar.gz
- {tokenator-0.1.3 → tokenator-0.1.5}/PKG-INFO +1 -1
- {tokenator-0.1.3 → tokenator-0.1.5}/pyproject.toml +1 -1
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/__init__.py +4 -6
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/create_migrations.py +9 -5
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/migrations/env.py +1 -1
- tokenator-0.1.5/src/tokenator/migrations/versions/f6f1f2437513_initial_migration.py +49 -0
- tokenator-0.1.5/src/tokenator/migrations.py +39 -0
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/schemas.py +1 -1
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/utils.py +6 -5
- tokenator-0.1.3/src/tokenator/migrations.py +0 -38
- {tokenator-0.1.3 → tokenator-0.1.5}/LICENSE +0 -0
- {tokenator-0.1.3 → tokenator-0.1.5}/README.md +0 -0
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/base_wrapper.py +0 -0
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/client_anthropic.py +0 -0
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/client_openai.py +0 -0
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/migrations/script.py.mako +0 -0
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/models.py +0 -0
- {tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/usage.py +0 -0

{tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/__init__.py
@@ -6,15 +6,13 @@ from . import usage
 from .utils import get_default_db_path, is_colab
 
 __version__ = "0.1.0"
-__all__ = ["OpenAIWrapper", "usage", "get_default_db_path"]
+__all__ = ["OpenAIWrapper", "usage", "get_default_db_path", "is_colab"]
 
 logger = logging.getLogger(__name__)
 
 try:
-
-
-
-    else:
-        logger.info("Running in Colab environment - skipping migrations")
+    # Always run migrations, even in Colab
+    from .migrations import check_and_run_migrations
+    check_and_run_migrations()
 except Exception as e:
     logger.warning(f"Failed to run migrations, but continuing anyway: {e}")
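
The net effect of this hunk is that migrations now run on every import, Colab included, with failures only logged. A minimal sketch of how the surrounding block in __init__.py presumably reads after the change (the removed branch is partly elided in this view, so the old code is not reproduced):

    import logging

    logger = logging.getLogger(__name__)

    try:
        # Always run migrations, even in Colab
        from .migrations import check_and_run_migrations
        check_and_run_migrations()
    except Exception as e:
        logger.warning(f"Failed to run migrations, but continuing anyway: {e}")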

{tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/create_migrations.py
@@ -1,11 +1,12 @@
 """Development utilities for tokenator."""
 
 import os
+import sys
 from pathlib import Path
 from alembic import command
 from tokenator.migrations import get_alembic_config
 
-def create_migration():
+def create_migration(message: str):
     """Create a new migration based on model changes."""
     config = get_alembic_config()
 
@@ -13,9 +14,12 @@ def create_migration():
     migrations_dir = Path(__file__).parent / "migrations" / "versions"
     migrations_dir.mkdir(parents=True, exist_ok=True)
 
-    # Generate migration
-    command.revision(config, autogenerate=True, message=
-
+    # Generate migration with custom message
+    command.revision(config, autogenerate=True, message=message)
 
 if __name__ == "__main__":
-
+    if len(sys.argv) > 1:
+        msg = " ".join(sys.argv[1:])
+    else:
+        msg = "auto generated migration"
+    create_migration(msg)
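
The development helper now takes the revision message as a parameter, and the __main__ block builds it from the command-line arguments, falling back to "auto generated migration". A usage sketch; the message below is illustrative, and running it requires Alembic plus an importable tokenator package:

    # Generate a new Alembic revision with a custom message.
    from tokenator.create_migrations import create_migration

    create_migration("add token_usage indexes")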

tokenator-0.1.5/src/tokenator/migrations/versions/f6f1f2437513_initial_migration.py
@@ -0,0 +1,49 @@
+"""Initial migration
+
+Revision ID: f6f1f2437513
+Revises:
+Create Date: 2024-12-21 17:33:27.187221
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'f6f1f2437513'
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('token_usage',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('execution_id', sa.String(), nullable=False),
+    sa.Column('provider', sa.String(), nullable=False),
+    sa.Column('model', sa.String(), nullable=False),
+    sa.Column('created_at', sa.DateTime(), nullable=False),
+    sa.Column('updated_at', sa.DateTime(), nullable=False),
+    sa.Column('prompt_tokens', sa.Integer(), nullable=False),
+    sa.Column('completion_tokens', sa.Integer(), nullable=False),
+    sa.Column('total_tokens', sa.Integer(), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index('idx_created_at', 'token_usage', ['created_at'], unique=False)
+    op.create_index('idx_execution_id', 'token_usage', ['execution_id'], unique=False)
+    op.create_index('idx_model', 'token_usage', ['model'], unique=False)
+    op.create_index('idx_provider', 'token_usage', ['provider'], unique=False)
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index('idx_provider', table_name='token_usage')
+    op.drop_index('idx_model', table_name='token_usage')
+    op.drop_index('idx_execution_id', table_name='token_usage')
+    op.drop_index('idx_created_at', table_name='token_usage')
+    op.drop_table('token_usage')
+    # ### end Alembic commands ###
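
This revision creates the token_usage table and four secondary indexes. A quick way to check what it produced is to inspect the SQLite catalog after migrations have run; a sketch, assuming a hypothetical database path:

    import sqlite3

    # List the tables and indexes created by the migration (Alembic's own
    # alembic_version bookkeeping table will appear as well).
    conn = sqlite3.connect("/tmp/tokenator_usage.db")
    for (name,) in conn.execute(
        "SELECT name FROM sqlite_master WHERE type IN ('table', 'index') ORDER BY name"
    ):
        print(name)
    conn.close()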

tokenator-0.1.5/src/tokenator/migrations.py
@@ -0,0 +1,39 @@
+"""Database migration utilities for tokenator."""
+
+import os
+from pathlib import Path
+from alembic.config import Config
+from alembic import command
+from .utils import get_default_db_path
+
+def get_alembic_config(db_path: str = None) -> Config:
+    """Get Alembic config for migrations."""
+    if db_path is None:
+        db_path = get_default_db_path()
+
+    # Get the directory containing this file
+    migrations_dir = Path(__file__).parent / "migrations"
+
+    # Create Config object
+    config = Config()
+    config.set_main_option("script_location", str(migrations_dir))
+    config.set_main_option("sqlalchemy.url", f"sqlite:///{db_path}")
+
+    return config
+
+def check_and_run_migrations(db_path: str = None):
+    """Check and run any pending database migrations."""
+    if db_path is None:
+        db_path = get_default_db_path()
+
+    dirname = os.path.dirname(db_path)
+    if dirname:
+        os.makedirs(dirname, exist_ok=True)
+
+    # Initialize database
+    import sqlite3
+    conn = sqlite3.connect(db_path)
+    conn.close()
+
+    config = get_alembic_config(db_path)
+    command.upgrade(config, "head")
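
Unlike the module it replaces, check_and_run_migrations no longer inspects the current revision or swallows exceptions; it ensures the database file and its parent directory exist and lets "alembic upgrade head" decide what is pending, while the caller in __init__.py handles errors. A sketch of driving the helpers directly, with a hypothetical path:

    from alembic import command
    from tokenator.migrations import check_and_run_migrations, get_alembic_config

    # Create the SQLite file if needed and upgrade it to the latest revision.
    check_and_run_migrations("/tmp/tokenator_usage.db")

    # The same Config works with other Alembic commands, e.g. printing the
    # revision the database is currently at.
    cfg = get_alembic_config("/tmp/tokenator_usage.db")
    command.current(cfg)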

{tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/schemas.py
@@ -24,7 +24,7 @@ def get_engine(db_path: str = None):
 def get_session(db_path: str = None):
     """Create a thread-safe session factory."""
     engine = get_engine(db_path)
-    Base.metadata.create_all(engine)
+    # Base.metadata.create_all(engine)
     session_factory = sessionmaker(bind=engine)
     return scoped_session(session_factory)
 
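
Commenting out Base.metadata.create_all hands schema creation entirely to the Alembic migration above, so sessions should only be opened after migrations have run. A minimal sketch (the query is illustrative and assumes the token_usage table from the initial migration):

    from sqlalchemy import text

    from tokenator.migrations import check_and_run_migrations
    from tokenator.schemas import get_session

    check_and_run_migrations()   # make sure token_usage exists
    Session = get_session()      # scoped_session bound to the default engine
    session = Session()
    try:
        print(session.execute(text("SELECT COUNT(*) FROM token_usage")).scalar())
    finally:
        session.close()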

{tokenator-0.1.3 → tokenator-0.1.5}/src/tokenator/utils.py
@@ -18,13 +18,14 @@ def is_colab() -> bool:
 
 def get_default_db_path() -> str:
     """Get the platform-specific default database path."""
-    system = platform.system().lower()
-
     try:
         if is_colab():
-            #
-
-
+            # Use in-memory database for Colab
+            return "usage.db"
+
+        system = platform.system().lower()
+
+        if system == "linux" or system == "darwin":
             # Follow XDG Base Directory Specification
             xdg_data_home = os.environ.get("XDG_DATA_HOME", "")
             if not xdg_data_home:
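
After this change the Colab branch short-circuits and returns a relative "usage.db" (a file in the working directory, despite the "in-memory" wording in the comment), and the platform check only happens outside Colab. A sketch of what callers observe; outputs are illustrative and depend on the platform and XDG environment variables, which this hunk only partially shows:

    from tokenator.utils import get_default_db_path, is_colab

    print(is_colab())             # False outside Google Colab
    print(get_default_db_path())  # "usage.db" in Colab; otherwise a per-platform
                                  # data directory resolved later in this function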

tokenator-0.1.3/src/tokenator/migrations.py
@@ -1,38 +0,0 @@
-"""Automatic database migrations manager."""
-
-import os
-import logging
-from pathlib import Path
-from alembic import command
-from alembic.config import Config
-from alembic.runtime.migration import MigrationContext
-from sqlalchemy import create_engine
-
-from .utils import get_default_db_path
-
-logger = logging.getLogger(__name__)
-
-def check_and_run_migrations():
-    """Check and run any pending database migrations."""
-    try:
-        db_path = get_default_db_path()
-        engine = create_engine(f"sqlite:///{db_path}")
-
-        # Create migrations table if it doesn't exist
-        with engine.connect() as conn:
-            context = MigrationContext.configure(conn)
-            current_rev = context.get_current_revision()
-
-        if current_rev is None:
-            # Run migrations
-            config = Config()
-            migrations_dir = os.path.join(os.path.dirname(__file__), "migrations")
-            config.set_main_option("script_location", migrations_dir)
-            config.set_main_option("sqlalchemy.url", f"sqlite:///{db_path}")
-
-            command.upgrade(config, "head")
-            logger.info("Database migrations completed successfully")
-    except Exception as e:
-        logger.error(f"Failed to run migrations: {e}")
-        # Don't raise the exception - allow the application to continue
-        # The user can manually initialize the DB later if needed