tokenator 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tokenator/__init__.py +4 -6
- tokenator/create_migrations.py +9 -5
- tokenator/migrations/env.py +1 -1
- tokenator/migrations/versions/f6f1f2437513_initial_migration.py +49 -0
- tokenator/migrations.py +30 -29
- tokenator/schemas.py +1 -1
- tokenator/utils.py +6 -5
- {tokenator-0.1.3.dist-info → tokenator-0.1.5.dist-info}/METADATA +1 -1
- tokenator-0.1.5.dist-info/RECORD +17 -0
- tokenator-0.1.3.dist-info/RECORD +0 -16
- {tokenator-0.1.3.dist-info → tokenator-0.1.5.dist-info}/LICENSE +0 -0
- {tokenator-0.1.3.dist-info → tokenator-0.1.5.dist-info}/WHEEL +0 -0
tokenator/__init__.py
CHANGED
@@ -6,15 +6,13 @@ from . import usage
 6  6   from .utils import get_default_db_path, is_colab
 7  7
 8  8   __version__ = "0.1.0"
 9    - __all__ = ["OpenAIWrapper", "usage", "get_default_db_path"]
    9 + __all__ = ["OpenAIWrapper", "usage", "get_default_db_path", "is_colab"]
10 10
11 11   logger = logging.getLogger(__name__)
12 12
13 13   try:
14    -
15    -
16    -
17    -     else:
18    -         logger.info("Running in Colab environment - skipping migrations")
   14 +     # Always run migrations, even in Colab
   15 +     from .migrations import check_and_run_migrations
   16 +     check_and_run_migrations()
19 17   except Exception as e:
20 18       logger.warning(f"Failed to run migrations, but continuing anyway: {e}")
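The practical effect is that importing tokenator now attempts the Alembic upgrade everywhere, Colab included, and merely logs a warning if it fails. A minimal sketch of what a consumer of 0.1.5 sees (the logging setup here is illustrative, not part of the package):

    import logging
    logging.basicConfig(level=logging.INFO)

    import tokenator  # the import-time hook above runs check_and_run_migrations()

    # The names exported via __all__ are available immediately afterwards:
    from tokenator import get_default_db_path, is_colab
    print("running in Colab:", is_colab())
    print("usage DB path   :", get_default_db_path())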
tokenator/create_migrations.py
CHANGED
@@ -1,11 +1,12 @@
 1  1   """Development utilities for tokenator."""
 2  2
 3  3   import os
    4 + import sys
 4  5   from pathlib import Path
 5  6   from alembic import command
 6  7   from tokenator.migrations import get_alembic_config
 7  8
 8    - def create_migration():
    9 + def create_migration(message: str):
 9 10       """Create a new migration based on model changes."""
10 11       config = get_alembic_config()
11 12
@@ -13,9 +14,12 @@ def create_migration():
13 14       migrations_dir = Path(__file__).parent / "migrations" / "versions"
14 15       migrations_dir.mkdir(parents=True, exist_ok=True)
15 16
16    -     # Generate migration
17    -     command.revision(config, autogenerate=True, message=
18    -
   17 +     # Generate migration with custom message
   18 +     command.revision(config, autogenerate=True, message=message)
19 19
20 20   if __name__ == "__main__":
21    -
   21 +     if len(sys.argv) > 1:
   22 +         msg = " ".join(sys.argv[1:])
   23 +     else:
   24 +         msg = "auto generated migration"
   25 +     create_migration(msg)
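The revision message can now be supplied either programmatically or on the command line, where sys.argv[1:] is joined into a single string and "auto generated migration" is the fallback. A small sketch of the programmatic path (the message text is made up):

    from tokenator.create_migrations import create_migration

    # roughly equivalent to: python -m tokenator.create_migrations add cost columns
    create_migration("add cost columns")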
tokenator/migrations/versions/f6f1f2437513_initial_migration.py
ADDED
@@ -0,0 +1,49 @@
    1 + """Initial migration
    2 +
    3 + Revision ID: f6f1f2437513
    4 + Revises:
    5 + Create Date: 2024-12-21 17:33:27.187221
    6 +
    7 + """
    8 + from typing import Sequence, Union
    9 +
   10 + from alembic import op
   11 + import sqlalchemy as sa
   12 +
   13 +
   14 + # revision identifiers, used by Alembic.
   15 + revision: str = 'f6f1f2437513'
   16 + down_revision: Union[str, None] = None
   17 + branch_labels: Union[str, Sequence[str], None] = None
   18 + depends_on: Union[str, Sequence[str], None] = None
   19 +
   20 +
   21 + def upgrade() -> None:
   22 +     # ### commands auto generated by Alembic - please adjust! ###
   23 +     op.create_table('token_usage',
   24 +     sa.Column('id', sa.Integer(), nullable=False),
   25 +     sa.Column('execution_id', sa.String(), nullable=False),
   26 +     sa.Column('provider', sa.String(), nullable=False),
   27 +     sa.Column('model', sa.String(), nullable=False),
   28 +     sa.Column('created_at', sa.DateTime(), nullable=False),
   29 +     sa.Column('updated_at', sa.DateTime(), nullable=False),
   30 +     sa.Column('prompt_tokens', sa.Integer(), nullable=False),
   31 +     sa.Column('completion_tokens', sa.Integer(), nullable=False),
   32 +     sa.Column('total_tokens', sa.Integer(), nullable=False),
   33 +     sa.PrimaryKeyConstraint('id')
   34 +     )
   35 +     op.create_index('idx_created_at', 'token_usage', ['created_at'], unique=False)
   36 +     op.create_index('idx_execution_id', 'token_usage', ['execution_id'], unique=False)
   37 +     op.create_index('idx_model', 'token_usage', ['model'], unique=False)
   38 +     op.create_index('idx_provider', 'token_usage', ['provider'], unique=False)
   39 +     # ### end Alembic commands ###
   40 +
   41 +
   42 + def downgrade() -> None:
   43 +     # ### commands auto generated by Alembic - please adjust! ###
   44 +     op.drop_index('idx_provider', table_name='token_usage')
   45 +     op.drop_index('idx_model', table_name='token_usage')
   46 +     op.drop_index('idx_execution_id', table_name='token_usage')
   47 +     op.drop_index('idx_created_at', table_name='token_usage')
   48 +     op.drop_table('token_usage')
   49 +     # ### end Alembic commands ###
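After this revision is applied, the SQLite file contains the token_usage table plus its four indexes. A quick standard-library check of the resulting schema, assuming the migrations have already run (for example by importing tokenator):

    import sqlite3
    from tokenator.utils import get_default_db_path

    conn = sqlite3.connect(get_default_db_path())
    # one row per column: (cid, name, type, notnull, default, pk)
    for cid, name, col_type, notnull, default, pk in conn.execute("PRAGMA table_info(token_usage)"):
        print(f"{name:20} {col_type:10} notnull={notnull} pk={pk}")
    # index names: idx_created_at, idx_execution_id, idx_model, idx_provider
    print([row[1] for row in conn.execute("PRAGMA index_list(token_usage)")])
    conn.close()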
tokenator/migrations.py
CHANGED
@@ -1,38 +1,39 @@
 1    - """
    1 + """Database migration utilities for tokenator."""
 2  2
 3  3   import os
 4    - import logging
 5  4   from pathlib import Path
 6    - from alembic import command
 7  5   from alembic.config import Config
 8    - from alembic
 9    - from sqlalchemy import create_engine
10    -
    6 + from alembic import command
11  7   from .utils import get_default_db_path
12  8
13    -
14    -
15    -
16    -     """Check and run any pending database migrations."""
17    -     try:
    9 + def get_alembic_config(db_path: str = None) -> Config:
   10 +     """Get Alembic config for migrations."""
   11 +     if db_path is None:
18 12           db_path = get_default_db_path()
19    -         engine = create_engine(f"sqlite:///{db_path}")
20 13
21    -
22    -
23    -
24    -
   14 +     # Get the directory containing this file
   15 +     migrations_dir = Path(__file__).parent / "migrations"
   16 +
   17 +     # Create Config object
   18 +     config = Config()
   19 +     config.set_main_option("script_location", str(migrations_dir))
   20 +     config.set_main_option("sqlalchemy.url", f"sqlite:///{db_path}")
   21 +
   22 +     return config
25 23
26    -
27    -
28    -
29    -
30    -
31    -
32    -
33    -
34    -
35    -
36    -
37    -
38    -
   24 + def check_and_run_migrations(db_path: str = None):
   25 +     """Check and run any pending database migrations."""
   26 +     if db_path is None:
   27 +         db_path = get_default_db_path()
   28 +
   29 +     dirname = os.path.dirname(db_path)
   30 +     if dirname:
   31 +         os.makedirs(dirname, exist_ok=True)
   32 +
   33 +     # Initialize database
   34 +     import sqlite3
   35 +     conn = sqlite3.connect(db_path)
   36 +     conn.close()
   37 +
   38 +     config = get_alembic_config(db_path)
   39 +     command.upgrade(config, "head")
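The module is now split into get_alembic_config(), which builds an in-memory Alembic Config pointing at the bundled migrations directory, and check_and_run_migrations(), which creates the SQLite file if needed and upgrades it to "head". Both take an optional db_path. A sketch against a throwaway database (the temporary-directory handling is illustrative, not part of tokenator):

    import os
    import tempfile
    from tokenator.migrations import check_and_run_migrations, get_alembic_config

    with tempfile.TemporaryDirectory() as tmp:
        db_path = os.path.join(tmp, "usage.db")
        check_and_run_migrations(db_path)             # creates the file and upgrades to "head"
        cfg = get_alembic_config(db_path)
        print(cfg.get_main_option("sqlalchemy.url"))  # sqlite:///<tmp>/usage.db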
tokenator/schemas.py
CHANGED
@@ -24,7 +24,7 @@ def get_engine(db_path: str = None):
24 24   def get_session(db_path: str = None):
25 25       """Create a thread-safe session factory."""
26 26       engine = get_engine(db_path)
27    -     Base.metadata.create_all(engine)
   27 +     # Base.metadata.create_all(engine)
28 28       session_factory = sessionmaker(bind=engine)
29 29       return scoped_session(session_factory)
30 30
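With create_all() commented out, get_session() no longer creates tables implicitly; the schema is expected to come from the Alembic migrations. A sketch of the new division of labour (the raw COUNT query is just for illustration):

    from sqlalchemy import text
    from tokenator.migrations import check_and_run_migrations
    from tokenator.schemas import get_session

    check_and_run_migrations()   # schema comes from Alembic now, not Base.metadata.create_all()
    session = get_session()      # returns a scoped_session
    try:
        count = session.execute(text("SELECT COUNT(*) FROM token_usage")).scalar()
        print("rows in token_usage:", count)
    finally:
        session.remove()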
tokenator/utils.py
CHANGED
@@ -18,13 +18,14 @@ def is_colab() -> bool:
18 18
19 19   def get_default_db_path() -> str:
20 20       """Get the platform-specific default database path."""
21    -     system = platform.system().lower()
22    -
23 21       try:
24 22           if is_colab():
25    -             #
26    -
27    -
   23 +             # Use in-memory database for Colab
   24 +             return "usage.db"
   25 +
   26 +         system = platform.system().lower()
   27 +
   28 +         if system == "linux" or system == "darwin":
28 29               # Follow XDG Base Directory Specification
29 30               xdg_data_home = os.environ.get("XDG_DATA_HOME", "")
30 31               if not xdg_data_home:
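In short, the Colab check now runs before the platform-specific branch and returns a plain relative "usage.db" path. A tiny check of the two helpers this hunk touches:

    from tokenator.utils import is_colab, get_default_db_path

    print("in Colab     :", is_colab())
    print("default DB at:", get_default_db_path())   # "usage.db" when running inside Colab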
tokenator-0.1.5.dist-info/RECORD
ADDED
@@ -0,0 +1,17 @@
    1 + tokenator/__init__.py,sha256=YsjMbOFztjrOKWNyguFcadzPKwhj3uaw0gRJQkCoRaM,566
    2 + tokenator/base_wrapper.py,sha256=vSu_pStKYulho7_5g0jMCNf84KRxC4kTKep0v8YE61M,2377
    3 + tokenator/client_anthropic.py,sha256=1ejWIZBxtk-mWTVaKWeMUvS2hZ_Dn-vNKYa3yopdjAU,6714
    4 + tokenator/client_openai.py,sha256=1xZuRA90kwlflTwEuFkXJHHN584XTeNh1CfEBMLELbQ,6308
    5 + tokenator/create_migrations.py,sha256=n1OVbWrdwvBdaN-Aqqt1gLCPQidfoQfeJtGsab_epGk,746
    6 + tokenator/migrations/env.py,sha256=LR_hONDa8Saiq9CyNUpH8kZCi5PtXLaDlfABs_CePkk,1415
    7 + tokenator/migrations/script.py.mako,sha256=nJL-tbLQE0Qy4P9S4r4ntNAcikPtoFUlvXe6xvm9ot8,635
    8 + tokenator/migrations/versions/f6f1f2437513_initial_migration.py,sha256=DvHcjnREmUHZVX9q1e6PS4wNK_d4qGw-8pz0eS4_3mE,1860
    9 + tokenator/migrations.py,sha256=BFgZRsdIx-Qs_WwDaH6cyi2124mLf5hA8VrIlW7f7Mg,1134
   10 + tokenator/models.py,sha256=EprE_MMJxDS-YXlcIQLZzfekH7xTYbeOC3bx3B2osVw,1171
   11 + tokenator/schemas.py,sha256=V7NYfY9eZvH3J6uOwXJz4dSAU6WYzINRnfFi1wWsTcc,2280
   12 + tokenator/usage.py,sha256=aHjGwzDzaiVznahNk5HqVyk3IxDo5FtFVfOUCeE7DZ4,7833
   13 + tokenator/utils.py,sha256=BzfyWZkKt-2Jw_DCS7tY3iicBynklgvmf_1cPGe-OEI,1883
   14 + tokenator-0.1.5.dist-info/LICENSE,sha256=wdG-B6-ODk8RQ4jq5uXSn0w1UWTzCH_MMyvh7AwtGns,1074
   15 + tokenator-0.1.5.dist-info/METADATA,sha256=jVSteW5iVYoqLT727GZUIdmuPNtG3rK7cvPqEBLMmHo,2444
   16 + tokenator-0.1.5.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
   17 + tokenator-0.1.5.dist-info/RECORD,,
tokenator-0.1.3.dist-info/RECORD
DELETED
@@ -1,16 +0,0 @@
 1    - tokenator/__init__.py,sha256=SB81-PEkyU-JHuckDNqeoopfpzbfL1jDweLaETPa4J0,626
 2    - tokenator/base_wrapper.py,sha256=vSu_pStKYulho7_5g0jMCNf84KRxC4kTKep0v8YE61M,2377
 3    - tokenator/client_anthropic.py,sha256=1ejWIZBxtk-mWTVaKWeMUvS2hZ_Dn-vNKYa3yopdjAU,6714
 4    - tokenator/client_openai.py,sha256=1xZuRA90kwlflTwEuFkXJHHN584XTeNh1CfEBMLELbQ,6308
 5    - tokenator/create_migrations.py,sha256=C_3WqB0tOGKXOA4JmvWuLpcyGEysWyRSiSttxX-Kie4,606
 6    - tokenator/migrations/env.py,sha256=eFTw66gG464JV53740RKU32wqEL8uZFReS_INrvkFrU,1414
 7    - tokenator/migrations/script.py.mako,sha256=nJL-tbLQE0Qy4P9S4r4ntNAcikPtoFUlvXe6xvm9ot8,635
 8    - tokenator/migrations.py,sha256=RHm5XI5qh6W-Ib06vz4bXmE9XL211n1lZLzQNHPoSzg,1396
 9    - tokenator/models.py,sha256=EprE_MMJxDS-YXlcIQLZzfekH7xTYbeOC3bx3B2osVw,1171
10    - tokenator/schemas.py,sha256=eVdBWi6_hTETnPw50glq0OvSh3PbP2pLl_aHdf3fi-M,2278
11    - tokenator/usage.py,sha256=aHjGwzDzaiVznahNk5HqVyk3IxDo5FtFVfOUCeE7DZ4,7833
12    - tokenator/utils.py,sha256=UHV6tKLd6zoz7Fml1LokkbGmN1hvQMfXDY4Aulkhar8,1910
13    - tokenator-0.1.3.dist-info/LICENSE,sha256=wdG-B6-ODk8RQ4jq5uXSn0w1UWTzCH_MMyvh7AwtGns,1074
14    - tokenator-0.1.3.dist-info/METADATA,sha256=kOesX0EPrxsqvrowcayXbA8phU7Ix9xPmp6Jqb_fYHM,2444
15    - tokenator-0.1.3.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
16    - tokenator-0.1.3.dist-info/RECORD,,
{tokenator-0.1.3.dist-info → tokenator-0.1.5.dist-info}/LICENSE
File without changes

{tokenator-0.1.3.dist-info → tokenator-0.1.5.dist-info}/WHEEL
File without changes