isa-model 0.3.91__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123):
  1. isa_model/client.py +732 -573
  2. isa_model/core/cache/redis_cache.py +401 -0
  3. isa_model/core/config/config_manager.py +53 -10
  4. isa_model/core/config.py +1 -1
  5. isa_model/core/database/__init__.py +1 -0
  6. isa_model/core/database/migrations.py +277 -0
  7. isa_model/core/database/supabase_client.py +123 -0
  8. isa_model/core/models/__init__.py +37 -0
  9. isa_model/core/models/model_billing_tracker.py +60 -88
  10. isa_model/core/models/model_manager.py +36 -18
  11. isa_model/core/models/model_repo.py +44 -38
  12. isa_model/core/models/model_statistics_tracker.py +234 -0
  13. isa_model/core/models/model_storage.py +0 -1
  14. isa_model/core/models/model_version_manager.py +959 -0
  15. isa_model/core/pricing_manager.py +2 -249
  16. isa_model/core/resilience/circuit_breaker.py +366 -0
  17. isa_model/core/security/secrets.py +358 -0
  18. isa_model/core/services/__init__.py +2 -4
  19. isa_model/core/services/intelligent_model_selector.py +101 -370
  20. isa_model/core/storage/hf_storage.py +1 -1
  21. isa_model/core/types.py +7 -0
  22. isa_model/deployment/cloud/modal/isa_audio_chatTTS_service.py +520 -0
  23. isa_model/deployment/cloud/modal/isa_audio_fish_service.py +0 -0
  24. isa_model/deployment/cloud/modal/isa_audio_openvoice_service.py +758 -0
  25. isa_model/deployment/cloud/modal/isa_audio_service_v2.py +1044 -0
  26. isa_model/deployment/cloud/modal/isa_embed_rerank_service.py +296 -0
  27. isa_model/deployment/cloud/modal/isa_video_hunyuan_service.py +423 -0
  28. isa_model/deployment/cloud/modal/isa_vision_ocr_service.py +519 -0
  29. isa_model/deployment/cloud/modal/isa_vision_qwen25_service.py +709 -0
  30. isa_model/deployment/cloud/modal/isa_vision_table_service.py +467 -323
  31. isa_model/deployment/cloud/modal/isa_vision_ui_service.py +607 -180
  32. isa_model/deployment/cloud/modal/isa_vision_ui_service_optimized.py +660 -0
  33. isa_model/deployment/core/deployment_manager.py +6 -4
  34. isa_model/deployment/services/auto_hf_modal_deployer.py +894 -0
  35. isa_model/eval/benchmarks/__init__.py +27 -0
  36. isa_model/eval/benchmarks/multimodal_datasets.py +460 -0
  37. isa_model/eval/benchmarks.py +244 -12
  38. isa_model/eval/evaluators/__init__.py +8 -2
  39. isa_model/eval/evaluators/audio_evaluator.py +727 -0
  40. isa_model/eval/evaluators/embedding_evaluator.py +742 -0
  41. isa_model/eval/evaluators/vision_evaluator.py +564 -0
  42. isa_model/eval/example_evaluation.py +395 -0
  43. isa_model/eval/factory.py +272 -5
  44. isa_model/eval/isa_benchmarks.py +700 -0
  45. isa_model/eval/isa_integration.py +582 -0
  46. isa_model/eval/metrics.py +159 -6
  47. isa_model/eval/tests/unit/test_basic.py +396 -0
  48. isa_model/inference/ai_factory.py +44 -8
  49. isa_model/inference/services/audio/__init__.py +21 -0
  50. isa_model/inference/services/audio/base_realtime_service.py +225 -0
  51. isa_model/inference/services/audio/isa_tts_service.py +0 -0
  52. isa_model/inference/services/audio/openai_realtime_service.py +320 -124
  53. isa_model/inference/services/audio/openai_stt_service.py +32 -6
  54. isa_model/inference/services/base_service.py +17 -1
  55. isa_model/inference/services/embedding/__init__.py +13 -0
  56. isa_model/inference/services/embedding/base_embed_service.py +111 -8
  57. isa_model/inference/services/embedding/isa_embed_service.py +305 -0
  58. isa_model/inference/services/embedding/openai_embed_service.py +2 -4
  59. isa_model/inference/services/embedding/tests/test_embedding.py +222 -0
  60. isa_model/inference/services/img/__init__.py +2 -2
  61. isa_model/inference/services/img/base_image_gen_service.py +24 -7
  62. isa_model/inference/services/img/replicate_image_gen_service.py +84 -422
  63. isa_model/inference/services/img/services/replicate_face_swap.py +193 -0
  64. isa_model/inference/services/img/services/replicate_flux.py +226 -0
  65. isa_model/inference/services/img/services/replicate_flux_kontext.py +219 -0
  66. isa_model/inference/services/img/services/replicate_sticker_maker.py +249 -0
  67. isa_model/inference/services/img/tests/test_img_client.py +297 -0
  68. isa_model/inference/services/llm/base_llm_service.py +30 -6
  69. isa_model/inference/services/llm/helpers/llm_adapter.py +63 -9
  70. isa_model/inference/services/llm/ollama_llm_service.py +2 -1
  71. isa_model/inference/services/llm/openai_llm_service.py +652 -55
  72. isa_model/inference/services/llm/yyds_llm_service.py +2 -1
  73. isa_model/inference/services/vision/__init__.py +5 -5
  74. isa_model/inference/services/vision/base_vision_service.py +118 -185
  75. isa_model/inference/services/vision/helpers/image_utils.py +11 -5
  76. isa_model/inference/services/vision/isa_vision_service.py +573 -0
  77. isa_model/inference/services/vision/tests/test_ocr_client.py +284 -0
  78. isa_model/serving/api/fastapi_server.py +88 -16
  79. isa_model/serving/api/middleware/auth.py +311 -0
  80. isa_model/serving/api/middleware/security.py +278 -0
  81. isa_model/serving/api/routes/analytics.py +486 -0
  82. isa_model/serving/api/routes/deployments.py +339 -0
  83. isa_model/serving/api/routes/evaluations.py +579 -0
  84. isa_model/serving/api/routes/logs.py +430 -0
  85. isa_model/serving/api/routes/settings.py +582 -0
  86. isa_model/serving/api/routes/unified.py +324 -165
  87. isa_model/serving/api/startup.py +304 -0
  88. isa_model/serving/modal_proxy_server.py +249 -0
  89. isa_model/training/__init__.py +100 -6
  90. isa_model/training/core/__init__.py +4 -1
  91. isa_model/training/examples/intelligent_training_example.py +281 -0
  92. isa_model/training/intelligent/__init__.py +25 -0
  93. isa_model/training/intelligent/decision_engine.py +643 -0
  94. isa_model/training/intelligent/intelligent_factory.py +888 -0
  95. isa_model/training/intelligent/knowledge_base.py +751 -0
  96. isa_model/training/intelligent/resource_optimizer.py +839 -0
  97. isa_model/training/intelligent/task_classifier.py +576 -0
  98. isa_model/training/storage/__init__.py +24 -0
  99. isa_model/training/storage/core_integration.py +439 -0
  100. isa_model/training/storage/training_repository.py +552 -0
  101. isa_model/training/storage/training_storage.py +628 -0
  102. {isa_model-0.3.91.dist-info → isa_model-0.4.0.dist-info}/METADATA +13 -1
  103. isa_model-0.4.0.dist-info/RECORD +182 -0
  104. isa_model/deployment/cloud/modal/isa_vision_doc_service.py +0 -766
  105. isa_model/deployment/cloud/modal/register_models.py +0 -321
  106. isa_model/inference/adapter/unified_api.py +0 -248
  107. isa_model/inference/services/helpers/stacked_config.py +0 -148
  108. isa_model/inference/services/img/flux_professional_service.py +0 -603
  109. isa_model/inference/services/img/helpers/base_stacked_service.py +0 -274
  110. isa_model/inference/services/others/table_transformer_service.py +0 -61
  111. isa_model/inference/services/vision/doc_analysis_service.py +0 -640
  112. isa_model/inference/services/vision/helpers/base_stacked_service.py +0 -274
  113. isa_model/inference/services/vision/ui_analysis_service.py +0 -823
  114. isa_model/scripts/inference_tracker.py +0 -283
  115. isa_model/scripts/mlflow_manager.py +0 -379
  116. isa_model/scripts/model_registry.py +0 -465
  117. isa_model/scripts/register_models.py +0 -370
  118. isa_model/scripts/register_models_with_embeddings.py +0 -510
  119. isa_model/scripts/start_mlflow.py +0 -95
  120. isa_model/scripts/training_tracker.py +0 -257
  121. isa_model-0.3.91.dist-info/RECORD +0 -138
  122. {isa_model-0.3.91.dist-info → isa_model-0.4.0.dist-info}/WHEEL +0 -0
  123. {isa_model-0.3.91.dist-info → isa_model-0.4.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,277 @@
1
+ """
2
+ Database Migration Manager for ISA Model
3
+
4
+ Handles schema creation and updates across different environments.
5
+ Best practices:
6
+ - Environment-specific migrations
7
+ - Version-controlled schema changes
8
+ - Rollback capabilities
9
+ - Schema validation
10
+ """
11
+
12
+ import logging
13
+ import psycopg2
14
+ from typing import Dict, List, Optional
15
+ from pathlib import Path
16
+
17
+ from ..config.config_manager import ConfigManager
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
class DatabaseMigrations:
    """Manages database migrations across environments.

    Reads the target environment and schema from ConfigManager and applies
    idempotent DDL (``CREATE ... IF NOT EXISTS``), so migrations are safe to
    re-run. Schema names are validated before being interpolated into SQL to
    guard against identifier injection.
    """

    def __init__(self):
        self.config_manager = ConfigManager()
        self.global_config = self.config_manager.get_global_config()
        # Environment name (e.g. "development" / "production") -- used for logging.
        self.environment = self.global_config.environment.value
        # All DDL below targets this schema; defaults to "public".
        self.schema = self.global_config.database.supabase_schema or "public"

    @staticmethod
    def _validate_identifier(name: str) -> str:
        """Return *name* if it is a safe SQL identifier, else raise ValueError.

        Guards the f-string interpolation of schema names into DDL, which
        cannot be parameterized by the driver.
        """
        if not name or not name.isidentifier():
            raise ValueError(f"Invalid SQL identifier: {name!r}")
        return name

    def get_database_url(self) -> str:
        """Return the PostgreSQL connection URL from the environment.

        Raises:
            RuntimeError: if DATABASE_URL is unset, instead of passing None
                to psycopg2 and failing later with an obscure error.
        """
        import os
        url = os.getenv('DATABASE_URL')
        if not url:
            raise RuntimeError("DATABASE_URL environment variable is not set")
        return url

    def create_schema(self, schema_name: str) -> bool:
        """Create a schema if it doesn't exist. Returns True on success."""
        try:
            self._validate_identifier(schema_name)
            conn = psycopg2.connect(self.get_database_url())
            try:
                with conn.cursor() as cursor:
                    cursor.execute(f"CREATE SCHEMA IF NOT EXISTS {schema_name}")
                conn.commit()
            finally:
                # Fixed: connection was previously leaked when the DDL raised.
                conn.close()

            logger.info(f"Schema '{schema_name}' created/verified")
            return True

        except Exception as e:
            logger.error(f"Failed to create schema '{schema_name}': {e}")
            return False

    def get_schema_migrations(self) -> Dict[str, List[str]]:
        """Define migrations for each schema.

        Returns:
            Mapping of migration name -> ordered list of SQL statements.
        """
        # NOTE: the former model_usage table/migration was intentionally
        # removed; usage tracking is disabled.
        return {
            "models_table": [
                f"""
                CREATE TABLE IF NOT EXISTS {self.schema}.models (
                    model_id TEXT PRIMARY KEY,
                    model_type TEXT NOT NULL,
                    provider TEXT NOT NULL,
                    metadata JSONB DEFAULT '{{}}',
                    created_at TIMESTAMPTZ DEFAULT NOW(),
                    updated_at TIMESTAMPTZ DEFAULT NOW()
                );
                """,
                f"CREATE INDEX IF NOT EXISTS idx_{self.schema}_models_type ON {self.schema}.models(model_type);",
                f"CREATE INDEX IF NOT EXISTS idx_{self.schema}_models_provider ON {self.schema}.models(provider);"
            ],
            "model_capabilities_table": [
                f"""
                CREATE TABLE IF NOT EXISTS {self.schema}.model_capabilities (
                    model_id TEXT REFERENCES {self.schema}.models(model_id) ON DELETE CASCADE,
                    capability TEXT NOT NULL,
                    created_at TIMESTAMPTZ DEFAULT NOW(),
                    PRIMARY KEY (model_id, capability)
                );
                """,
                f"CREATE INDEX IF NOT EXISTS idx_{self.schema}_capabilities_capability ON {self.schema}.model_capabilities(capability);"
            ],
            "model_embeddings_table": [
                # Try to create with VECTOR type, fallback to JSONB if pgvector not available
                f"""
                DO $$
                BEGIN
                    -- Try to create with VECTOR type
                    BEGIN
                        CREATE TABLE IF NOT EXISTS {self.schema}.model_embeddings (
                            id BIGSERIAL PRIMARY KEY,
                            model_id TEXT REFERENCES {self.schema}.models(model_id) ON DELETE CASCADE,
                            provider TEXT NOT NULL,
                            description TEXT NOT NULL,
                            embedding VECTOR(1536),
                            created_at TIMESTAMPTZ DEFAULT NOW(),
                            updated_at TIMESTAMPTZ DEFAULT NOW()
                        );
                    EXCEPTION WHEN undefined_object THEN
                        -- Fallback to JSONB if VECTOR type doesn't exist
                        CREATE TABLE IF NOT EXISTS {self.schema}.model_embeddings (
                            id BIGSERIAL PRIMARY KEY,
                            model_id TEXT REFERENCES {self.schema}.models(model_id) ON DELETE CASCADE,
                            provider TEXT NOT NULL,
                            description TEXT NOT NULL,
                            embedding JSONB,
                            created_at TIMESTAMPTZ DEFAULT NOW(),
                            updated_at TIMESTAMPTZ DEFAULT NOW()
                        );
                    END;
                END $$;
                """,
                f"CREATE INDEX IF NOT EXISTS idx_{self.schema}_embeddings_model_id ON {self.schema}.model_embeddings(model_id);"
            ],
            "tool_embeddings_table": [
                f"""
                CREATE TABLE IF NOT EXISTS {self.schema}.tool_embeddings (
                    id BIGSERIAL PRIMARY KEY,
                    tool_name TEXT UNIQUE NOT NULL,
                    description TEXT,
                    embedding JSONB,
                    created_at TIMESTAMPTZ DEFAULT NOW(),
                    updated_at TIMESTAMPTZ DEFAULT NOW()
                );
                """,
                f"CREATE INDEX IF NOT EXISTS idx_{self.schema}_tool_embeddings_name ON {self.schema}.tool_embeddings(tool_name);"
            ],
            "prompt_embeddings_table": [
                f"""
                CREATE TABLE IF NOT EXISTS {self.schema}.prompt_embeddings (
                    id BIGSERIAL PRIMARY KEY,
                    prompt_name TEXT UNIQUE NOT NULL,
                    description TEXT,
                    embedding JSONB,
                    created_at TIMESTAMPTZ DEFAULT NOW(),
                    updated_at TIMESTAMPTZ DEFAULT NOW()
                );
                """,
                f"CREATE INDEX IF NOT EXISTS idx_{self.schema}_prompt_embeddings_name ON {self.schema}.prompt_embeddings(prompt_name);"
            ],
            "resource_embeddings_table": [
                f"""
                CREATE TABLE IF NOT EXISTS {self.schema}.resource_embeddings (
                    id BIGSERIAL PRIMARY KEY,
                    resource_uri TEXT UNIQUE NOT NULL,
                    category TEXT,
                    name TEXT,
                    description TEXT,
                    embedding JSONB,
                    created_at TIMESTAMPTZ DEFAULT NOW(),
                    updated_at TIMESTAMPTZ DEFAULT NOW()
                );
                """,
                f"CREATE INDEX IF NOT EXISTS idx_{self.schema}_resource_embeddings_uri ON {self.schema}.resource_embeddings(resource_uri);",
                f"CREATE INDEX IF NOT EXISTS idx_{self.schema}_resource_embeddings_category ON {self.schema}.resource_embeddings(category);"
            ]
        }

    def run_migrations(self) -> bool:
        """Run all migrations for the current environment.

        Each named migration's statements execute in one transaction; the
        first failure rolls back and aborts. Returns True when all succeed.
        """
        # First, ensure the target schema exists.
        if not self.create_schema(self.schema):
            return False

        try:
            conn = psycopg2.connect(self.get_database_url())
        except Exception as e:
            logger.error(f"Migration failed: {e}")
            return False

        try:
            cursor = conn.cursor()
            for migration_name, sql_statements in self.get_schema_migrations().items():
                logger.info(f"Running migration: {migration_name}")

                for sql in sql_statements:
                    try:
                        cursor.execute(sql)
                        logger.debug(f"Executed: {sql[:100]}...")
                    except Exception as e:
                        logger.error(f"Failed to execute SQL in {migration_name}: {e}")
                        logger.error(f"SQL: {sql}")
                        conn.rollback()
                        return False

                # Commit per migration so earlier migrations survive a later failure.
                conn.commit()
                logger.info(f"Migration '{migration_name}' completed")

            logger.info(f"All migrations completed for {self.environment} environment (schema: {self.schema})")
            return True

        except Exception as e:
            logger.error(f"Migration failed: {e}")
            return False
        finally:
            # Fixed: connection was previously leaked on the error-return paths.
            conn.close()

    def validate_schema(self) -> Dict[str, bool]:
        """Validate that all required tables exist.

        Returns:
            Mapping of table name -> whether it exists in the configured schema.
        """
        results: Dict[str, bool] = {}

        try:
            conn = psycopg2.connect(self.get_database_url())
            try:
                cursor = conn.cursor()

                # Check each required table.
                required_tables = ['models', 'model_capabilities', 'model_embeddings', 'tool_embeddings', 'prompt_embeddings', 'resource_embeddings']

                for table in required_tables:
                    # Fixed: values are now bound as parameters instead of
                    # being f-string interpolated into the SQL text.
                    cursor.execute(
                        """
                        SELECT COUNT(*)
                        FROM information_schema.tables
                        WHERE table_schema = %s AND table_name = %s
                        """,
                        (self.schema, table),
                    )

                    exists = cursor.fetchone()[0] > 0
                    results[table] = exists

                    if exists:
                        logger.info(f"✅ Table {self.schema}.{table} exists")
                    else:
                        logger.warning(f"❌ Table {self.schema}.{table} missing")
            finally:
                conn.close()

        except Exception as e:
            logger.error(f"Schema validation failed: {e}")

        return results
250
def run_environment_migrations():
    """Run and validate migrations for the current environment.

    Returns True only when every migration ran and every required table was
    found afterwards; False otherwise.
    """
    migrations = DatabaseMigrations()

    logger.info(f"Starting migrations for {migrations.environment} environment")
    logger.info(f"Target schema: {migrations.schema}")

    # Apply the DDL first; bail out early on failure.
    if not migrations.run_migrations():
        logger.error("❌ Migration failed")
        return False

    # DDL succeeded -- confirm every required table actually exists.
    validation = migrations.validate_schema()
    all_valid = all(validation.values())
    if all_valid:
        logger.info("🎯 All migrations completed and validated successfully!")
    else:
        logger.warning("⚠️ Migrations completed but validation found issues")
    return all_valid

if __name__ == "__main__":
    # Run migrations when script is executed directly
    run_environment_migrations()
@@ -0,0 +1,123 @@
1
+ """
2
+ Centralized Supabase Client for ISA Model Core
3
+
4
+ Provides a singleton Supabase client instance that:
5
+ - Gets configuration from ConfigManager
6
+ - Handles environment-based schema selection
7
+ - Provides a single point of database access for all services
8
+ """
9
+
10
+ import logging
11
+ from typing import Optional
12
+ from supabase import create_client, Client
13
+
14
+ from ..config.config_manager import ConfigManager
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
class SupabaseClient:
    """Singleton Supabase client with environment-aware configuration.

    Every instantiation yields the same object; the underlying
    ``supabase.Client`` is built once from ConfigManager settings and table
    access is routed through the schema configured for the environment.
    """

    _instance: Optional['SupabaseClient'] = None
    _client: Optional[Client] = None
    _initialized = False

    def __new__(cls):
        # Classic singleton: reuse the one instance across all constructions.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # Run the one-time client setup only on the first construction.
        if not self._initialized:
            self._initialize_client()
            SupabaseClient._initialized = True

    def _initialize_client(self):
        """Build the underlying client from ConfigManager settings."""
        try:
            config_manager = ConfigManager()
            global_config = config_manager.get_global_config()

            # Pull the database settings for the active environment.
            self.url = global_config.database.supabase_url
            self.key = global_config.database.supabase_key
            self.schema = global_config.database.supabase_schema or "public"
            self.environment = global_config.environment.value

            if not self.url or not self.key:
                raise ValueError("Supabase URL and key must be configured")

            self._client = create_client(self.url, self.key)

            logger.info(f"Supabase client initialized for {self.environment} environment (schema: {self.schema})")

        except Exception as e:
            logger.error(f"Failed to initialize Supabase client: {e}")
            raise

    def get_client(self) -> Client:
        """Return the raw Supabase client instance."""
        if not self._client:
            raise RuntimeError("Supabase client not initialized")
        return self._client

    def table(self, table_name: str):
        """Return a table handle bound to the environment's schema."""
        if not self._client:
            raise RuntimeError("Supabase client not initialized")

        # Non-public schemas must be selected explicitly on each access.
        uses_default_schema = not self.schema or self.schema == "public"
        if uses_default_schema:
            return self._client.table(table_name)
        return self._client.schema(self.schema).table(table_name)

    def rpc(self, function_name: str, params: Optional[dict] = None):
        """Invoke an RPC function through the shared client.

        RPC functions typically use the public schema; extend here if
        schema-specific functions are ever needed.
        """
        if not self._client:
            raise RuntimeError("Supabase client not initialized")
        return self._client.rpc(function_name, params)

    def get_schema(self) -> str:
        """Return the schema currently in use."""
        return self.schema

    def get_environment(self) -> str:
        """Return the active environment name."""
        return self.environment

    def test_connection(self) -> bool:
        """Probe the database with a trivial query; True if it succeeds."""
        try:
            self.table('models').select('*').limit(1).execute()
            logger.debug("Database connection test successful")
            return True
        except Exception as e:
            logger.warning(f"Database connection test failed: {e}")
            return False
104
+
105
# Global singleton instance, populated lazily by get_supabase_client().
_supabase_client = None

def get_supabase_client() -> SupabaseClient:
    """Return the process-wide SupabaseClient, creating it on first use."""
    global _supabase_client
    if _supabase_client is None:
        _supabase_client = SupabaseClient()
    return _supabase_client
114
+
115
def get_supabase_table(table_name: str):
    """Convenience wrapper: fetch *table_name* bound to the configured schema."""
    return get_supabase_client().table(table_name)
119
+
120
def get_supabase_rpc(function_name: str, params: Optional[dict] = None):
    """Convenience wrapper: invoke RPC *function_name* via the shared client."""
    return get_supabase_client().rpc(function_name, params)
@@ -0,0 +1,37 @@
1
+ """
2
+ Core Models Module
3
+
4
+ Provides model management, registry, and lifecycle components for the ISA Model SDK.
5
+
6
+ This module includes:
7
+ - ModelRegistry: Central model registry and metadata management
8
+ - ModelManager: High-level model lifecycle management
9
+ - ModelVersionManager: Version control and lineage tracking
10
+ - ModelBillingTracker: Cost tracking and billing
11
+ - ModelStatisticsTracker: Usage statistics and analytics
12
+ """
13
+
14
+ from .model_repo import ModelRegistry, ModelType, ModelCapability
15
+ from .model_manager import ModelManager
16
+ from .model_version_manager import ModelVersionManager, ModelVersion, VersionType
17
+ from .model_billing_tracker import ModelBillingTracker
18
+ from .model_statistics_tracker import ModelStatisticsTracker
19
+
20
+ __all__ = [
21
+ # Core registry and types
22
+ 'ModelRegistry',
23
+ 'ModelType',
24
+ 'ModelCapability',
25
+
26
+ # Model management
27
+ 'ModelManager',
28
+
29
+ # Version management
30
+ 'ModelVersionManager',
31
+ 'ModelVersion',
32
+ 'VersionType',
33
+
34
+ # Tracking and analytics
35
+ 'ModelBillingTracker',
36
+ 'ModelStatisticsTracker'
37
+ ]
@@ -40,6 +40,7 @@ class ServiceType(Enum):
40
40
  IMAGE_GENERATION = "image_generation"
41
41
  AUDIO_STT = "audio_stt"
42
42
  AUDIO_TTS = "audio_tts"
43
+ AUDIO_REALTIME = "audio_realtime"
43
44
 
44
45
  @dataclass
45
46
  class ModelUsageRecord:
@@ -108,7 +109,7 @@ class ModelBillingTracker:
108
109
  def _load_data(self):
109
110
  """Load existing billing data from registry or local storage"""
110
111
  try:
111
- if self.model_registry and hasattr(self.model_registry, 'supabase'):
112
+ if self.model_registry and hasattr(self.model_registry, 'supabase_client'):
112
113
  # Load from Supabase
113
114
  self._load_from_supabase()
114
115
  else:
@@ -119,33 +120,10 @@ class ModelBillingTracker:
119
120
  self.usage_records = []
120
121
 
121
122
  def _load_from_supabase(self):
122
- """Load billing data from Supabase"""
123
- try:
124
- if not self.model_registry or not hasattr(self.model_registry, 'supabase'):
125
- logger.warning("No Supabase client available for billing data loading")
126
- self.usage_records = []
127
- return
128
-
129
- # Query model_usage table for recent usage records (last 30 days)
130
- from datetime import datetime, timedelta
131
- thirty_days_ago = (datetime.now() - timedelta(days=30)).isoformat()
132
-
133
- result = self.model_registry.supabase.table('model_usage').select('*').gte('timestamp', thirty_days_ago).order('timestamp', desc=True).execute()
134
-
135
- if result.data:
136
- self.usage_records = [
137
- ModelUsageRecord.from_dict(record)
138
- for record in result.data
139
- ]
140
- logger.info(f"Loaded {len(self.usage_records)} billing records from Supabase")
141
- else:
142
- self.usage_records = []
143
- logger.info("No billing records found in Supabase")
144
-
145
- except Exception as e:
146
- logger.error(f"Failed to load billing data from Supabase: {e}")
147
- # Fallback to empty records
148
- self.usage_records = []
123
+ """Load billing data from Supabase - disabled as model_usage table removed"""
124
+ # Model usage table has been removed, so no data to load
125
+ self.usage_records = []
126
+ logger.info("Model usage tracking disabled - no billing records to load from Supabase")
149
127
 
150
128
  def _load_from_local(self):
151
129
  """Load billing data from local JSON file"""
@@ -161,7 +139,7 @@ class ModelBillingTracker:
161
139
  def _save_data(self):
162
140
  """Save billing data to registry or local storage"""
163
141
  try:
164
- if self.model_registry and hasattr(self.model_registry, 'supabase'):
142
+ if self.model_registry and hasattr(self.model_registry, 'supabase_client'):
165
143
  self._save_to_supabase()
166
144
  else:
167
145
  self._save_to_local()
@@ -170,52 +148,55 @@ class ModelBillingTracker:
170
148
 
171
149
  def _save_to_supabase(self):
172
150
  """Save billing data to Supabase"""
173
- try:
174
- if not self.model_registry or not hasattr(self.model_registry, 'supabase'):
175
- logger.warning("No Supabase client available for billing data saving")
176
- return
177
-
178
- if not self.usage_records:
179
- logger.debug("No usage records to save")
180
- return
181
-
182
- # Convert usage records to dict format for Supabase
183
- records_to_save = []
184
- for record in self.usage_records:
185
- record_dict = record.to_dict()
186
- # Ensure all required fields are present and properly formatted
187
- record_dict['created_at'] = record_dict.get('timestamp')
188
- records_to_save.append(record_dict)
189
-
190
- # Insert records into model_usage table (upsert to handle duplicates)
191
- result = self.model_registry.supabase.table('model_usage').upsert(
192
- records_to_save,
193
- on_conflict='timestamp,model_id,operation' # Avoid duplicates based on these fields
194
- ).execute()
195
-
196
- if result.data:
197
- logger.info(f"Successfully saved {len(result.data)} billing records to Supabase")
198
- else:
199
- logger.warning("No records were saved to Supabase")
200
-
201
- except Exception as e:
202
- logger.error(f"Failed to save billing data to Supabase: {e}")
203
- # Fallback to local storage on Supabase failure
204
- logger.info("Falling back to local storage for billing data")
205
- self._save_to_local()
151
+ # Disabled to prevent model_usage table writes
152
+ # try:
153
+ # if not self.model_registry or not hasattr(self.model_registry, 'supabase_client'):
154
+ # logger.warning("No Supabase client available for billing data saving")
155
+ # return
156
+ #
157
+ # if not self.usage_records:
158
+ # logger.debug("No usage records to save")
159
+ # return
160
+ #
161
+ # # Convert usage records to dict format for Supabase
162
+ # records_to_save = []
163
+ # for record in self.usage_records:
164
+ # record_dict = record.to_dict()
165
+ # # Ensure all required fields are present and properly formatted
166
+ # record_dict['created_at'] = record_dict.get('timestamp')
167
+ # records_to_save.append(record_dict)
168
+ #
169
+ # # Insert records into model_usage table (simple insert, let DB handle duplicates)
170
+ # result = self.model_registry.supabase_client.table('model_usage').insert(
171
+ # records_to_save
172
+ # ).execute()
173
+ #
174
+ # if result.data:
175
+ # logger.info(f"Successfully saved {len(result.data)} billing records to Supabase")
176
+ # else:
177
+ # logger.warning("No records were saved to Supabase")
178
+ #
179
+ # except Exception as e:
180
+ # logger.error(f"Failed to save billing data to Supabase: {e}")
181
+ # # Fallback to local storage on Supabase failure
182
+ # logger.info("Falling back to local storage for billing data")
183
+ # self._save_to_local()
184
+ pass
206
185
 
207
186
  def _save_to_local(self):
208
187
  """Save billing data to local JSON file"""
209
- self.storage_path.parent.mkdir(parents=True, exist_ok=True)
210
-
211
- data = {
212
- "session_start": self.session_start,
213
- "last_updated": datetime.now(timezone.utc).isoformat(),
214
- "usage_records": [record.to_dict() for record in self.usage_records]
215
- }
216
-
217
- with open(self.storage_path, 'w') as f:
218
- json.dump(data, f, indent=2)
188
+ # Disabled to prevent automatic file creation
189
+ # self.storage_path.parent.mkdir(parents=True, exist_ok=True)
190
+ #
191
+ # data = {
192
+ # "session_start": self.session_start,
193
+ # "last_updated": datetime.now(timezone.utc).isoformat(),
194
+ # "usage_records": [record.to_dict() for record in self.usage_records]
195
+ # }
196
+ #
197
+ # with open(self.storage_path, 'w') as f:
198
+ # json.dump(data, f, indent=2)
199
+ pass
219
200
 
220
201
  def track_model_usage(
221
202
  self,
@@ -307,23 +288,14 @@ class ModelBillingTracker:
307
288
  # Import here to avoid circular imports
308
289
  from .model_manager import ModelManager
309
290
 
310
- # Get model info to determine provider model name
311
- if self.model_registry:
312
- model_info = self.model_registry.get_model_info(model_id)
313
- if model_info and model_info.get('metadata'):
314
- provider_model_name = model_info['metadata'].get('provider_model_name')
315
- if provider_model_name:
316
- # Use ModelManager pricing
317
- pricing = ModelManager.MODEL_PRICING.get(provider, {}).get(provider_model_name)
318
- if pricing:
319
- cost = 0.0
320
- if input_tokens is not None and "input" in pricing:
321
- cost += (input_tokens / 1000000) * pricing["input"]
322
- if output_tokens is not None and "output" in pricing:
323
- cost += (output_tokens / 1000000) * pricing["output"]
324
- return cost
291
+ # Create ModelManager instance to get pricing
292
+ model_manager = ModelManager()
293
+
294
+ # Use the centralized pricing calculation
295
+ if input_tokens is not None and output_tokens is not None:
296
+ return model_manager.calculate_cost(provider, model_id, input_tokens, output_tokens)
325
297
 
326
- # Fallback to default pricing if model not found
298
+ # Fallback for non-token based services
327
299
  return 0.0
328
300
 
329
301
  except Exception as e: