minix 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. minix-0.1.0/PKG-INFO +21 -0
  2. minix-0.1.0/README.md +1 -0
  3. minix-0.1.0/pyproject.toml +28 -0
  4. minix-0.1.0/src/minix/__init__.py +0 -0
  5. minix-0.1.0/src/minix/core/__init__.py +0 -0
  6. minix-0.1.0/src/minix/core/bootstrap/__init__.py +1 -0
  7. minix-0.1.0/src/minix/core/bootstrap/bootstrap.py +58 -0
  8. minix-0.1.0/src/minix/core/connectors/__init__.py +1 -0
  9. minix-0.1.0/src/minix/core/connectors/connector.py +4 -0
  10. minix-0.1.0/src/minix/core/connectors/object_storage_connector/__init__.py +4 -0
  11. minix-0.1.0/src/minix/core/connectors/object_storage_connector/config.py +11 -0
  12. minix-0.1.0/src/minix/core/connectors/object_storage_connector/connector.py +121 -0
  13. minix-0.1.0/src/minix/core/connectors/qdrant_connector/__init__.py +1 -0
  14. minix-0.1.0/src/minix/core/connectors/qdrant_connector/connector.py +20 -0
  15. minix-0.1.0/src/minix/core/connectors/sql_connector/__init__.py +1 -0
  16. minix-0.1.0/src/minix/core/connectors/sql_connector/sql_connector.py +96 -0
  17. minix-0.1.0/src/minix/core/consumer/__init__.py +1 -0
  18. minix-0.1.0/src/minix/core/consumer/async_consumer.py +83 -0
  19. minix-0.1.0/src/minix/core/controller/__init__.py +1 -0
  20. minix-0.1.0/src/minix/core/controller/controller.py +32 -0
  21. minix-0.1.0/src/minix/core/entity/__init__.py +4 -0
  22. minix-0.1.0/src/minix/core/entity/entity.py +4 -0
  23. minix-0.1.0/src/minix/core/entity/qdrant_entity.py +26 -0
  24. minix-0.1.0/src/minix/core/entity/redis_entity.py +4 -0
  25. minix-0.1.0/src/minix/core/entity/sql_entity.py +25 -0
  26. minix-0.1.0/src/minix/core/install/__init__.py +1 -0
  27. minix-0.1.0/src/minix/core/install/installable.py +11 -0
  28. minix-0.1.0/src/minix/core/model/__init__.py +4 -0
  29. minix-0.1.0/src/minix/core/model/embedding_model.py +20 -0
  30. minix-0.1.0/src/minix/core/model/mlflow_model.py +134 -0
  31. minix-0.1.0/src/minix/core/model/model.py +21 -0
  32. minix-0.1.0/src/minix/core/model/model_registry.py +28 -0
  33. minix-0.1.0/src/minix/core/module/__init__.py +1 -0
  34. minix-0.1.0/src/minix/core/module/module.py +79 -0
  35. minix-0.1.0/src/minix/core/registry/__init__.py +1 -0
  36. minix-0.1.0/src/minix/core/registry/registry.py +28 -0
  37. minix-0.1.0/src/minix/core/repository/__init__.py +3 -0
  38. minix-0.1.0/src/minix/core/repository/qdrant/__init__.py +1 -0
  39. minix-0.1.0/src/minix/core/repository/qdrant/qdrant_repository.py +39 -0
  40. minix-0.1.0/src/minix/core/repository/redis/__init__.py +1 -0
  41. minix-0.1.0/src/minix/core/repository/redis/redis_repository.py +13 -0
  42. minix-0.1.0/src/minix/core/repository/repository.py +8 -0
  43. minix-0.1.0/src/minix/core/repository/sql/__init__.py +1 -0
  44. minix-0.1.0/src/minix/core/repository/sql/sql_repository.py +78 -0
  45. minix-0.1.0/src/minix/core/scheduler/__init__.py +1 -0
  46. minix-0.1.0/src/minix/core/scheduler/scheduler.py +105 -0
  47. minix-0.1.0/src/minix/core/scheduler/task/__init__.py +1 -0
  48. minix-0.1.0/src/minix/core/scheduler/task/task.py +47 -0
  49. minix-0.1.0/src/minix/core/service/__init__.py +4 -0
  50. minix-0.1.0/src/minix/core/service/qdrant/__init__.py +1 -0
  51. minix-0.1.0/src/minix/core/service/qdrant/qdrant_service.py +26 -0
  52. minix-0.1.0/src/minix/core/service/redis/__init__.py +1 -0
  53. minix-0.1.0/src/minix/core/service/redis/redis_service.py +9 -0
  54. minix-0.1.0/src/minix/core/service/service.py +20 -0
  55. minix-0.1.0/src/minix/core/service/sql/__init__.py +1 -0
  56. minix-0.1.0/src/minix/core/service/sql/sql_service.py +39 -0
  57. minix-0.1.0/src/minix/core/utils/__init__.py +1 -0
  58. minix-0.1.0/src/minix/core/utils/mlflow/__init__.py +0 -0
  59. minix-0.1.0/src/minix/core/utils/mlflow/log_metric.py +14 -0
  60. minix-0.1.0/src/minix/core/utils/singleton/__init__.py +1 -0
  61. minix-0.1.0/src/minix/core/utils/singleton/singleton.py +12 -0
minix-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,21 @@
1
+ Metadata-Version: 2.3
2
+ Name: minix
3
+ Version: 0.1.0
4
+ Summary:
5
+ Author: AmirHossein Advari
6
+ Author-email: amiradvari@gmail.com
7
+ Requires-Python: >=3.12
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: Programming Language :: Python :: 3.12
10
+ Classifier: Programming Language :: Python :: 3.13
11
+ Requires-Dist: aiokafka (>=0.12.0,<0.13.0)
12
+ Requires-Dist: boto3 (>=1.39.14,<2.0.0)
13
+ Requires-Dist: celery (>=5.5.3,<6.0.0)
14
+ Requires-Dist: dotenv (>=0.9.9,<0.10.0)
15
+ Requires-Dist: fastapi (>=0.116.1,<0.117.0)
16
+ Requires-Dist: pymysql (>=1.1.1,<2.0.0)
17
+ Requires-Dist: sqlalchemy (>=2.0.41,<3.0.0)
18
+ Requires-Dist: uvicorn (>=0.35.0,<0.36.0)
19
+ Description-Content-Type: text/markdown
20
+
21
+ Simple Backend framework for data applications.
minix-0.1.0/README.md ADDED
@@ -0,0 +1 @@
1
+ Simple Backend framework for data applications.
@@ -0,0 +1,28 @@
1
+ [project]
2
+ name = "minix"
3
+ version = "0.1.0"
4
+ description = ""
5
+ authors = [
6
+ {name = "AmirHossein Advari",email = "amiradvari@gmail.com"}
7
+ ]
8
+ readme = "README.md"
9
+ requires-python = ">=3.12"
10
+ dependencies = [
11
+ "fastapi (>=0.116.1,<0.117.0)",
12
+ "uvicorn (>=0.35.0,<0.36.0)",
13
+ "celery (>=5.5.3,<6.0.0)",
14
+ "boto3 (>=1.39.14,<2.0.0)",
15
+ "aiokafka (>=0.12.0,<0.13.0)",
16
+ "sqlalchemy (>=2.0.41,<3.0.0)",
17
+ "dotenv (>=0.9.9,<0.10.0)",
18
+ "pymysql (>=1.1.1,<2.0.0)"
19
+ ]
20
+
21
+
22
+ [build-system]
23
+ requires = ["poetry-core>=2.0.0,<3.0.0"]
24
+ build-backend = "poetry.core.masonry.api"
25
+
26
+
27
+ [tool.poetry.extras]
28
+ ai = ["torch", "mlflow"]
File without changes
File without changes
@@ -0,0 +1 @@
1
+ from .bootstrap import bootstrap
@@ -0,0 +1,58 @@
import pymysql

from src.minix.core.connectors import Connector
from src.minix.core.module import Module
from fastapi import FastAPI
from src.minix.core.registry import Registry
from src.minix.core.scheduler import SchedulerConfig, Scheduler

# Make pymysql masquerade as MySQLdb before any SQLAlchemy engine is created.
pymysql.install_as_MySQLdb()

import os
import dotenv

# Load .env at import time so the os.getenv() calls below see configured values.
dotenv.load_dotenv()


def register_connectors(connectors: list[Connector]):
    """Register each connector instance in the global Registry, keyed by its class."""
    for connector in connectors:
        Registry().register(connector.__class__, connector)


def register_scheduler():
    """Build the Celery-backed Scheduler from environment variables and register it."""
    Registry().register(Scheduler, Scheduler(
        SchedulerConfig()
        .set_broker_url(os.getenv('CELERY_BROKER_URL'))
        .set_result_backend(os.getenv('CELERY_RESULT_BACKEND'))
        .set_task_serializer('json')
        .set_result_serializer('json')
        .set_accept_content(['json'])
        .set_timezone('GMT')
    ))


def register_fast_api():
    """Register a fresh FastAPI application instance in the Registry."""
    Registry().register(FastAPI, FastAPI())


def register_modules(modules: list[Module]):
    """Install all modules, lazily registering FastAPI / Scheduler only if some module needs them.

    A module needs FastAPI when it declares controllers, and the Scheduler when
    it declares tasks or periodic tasks.
    """
    fast_api = False
    scheduler = False
    for module in modules:
        if module.controllers is not None and len(module.controllers) > 0:
            fast_api = True
        if module.periodic_tasks is not None and len(module.periodic_tasks) > 0:
            scheduler = True
        if module.tasks is not None and len(module.tasks) > 0:
            scheduler = True

    if fast_api:
        register_fast_api()
    if scheduler:
        register_scheduler()
    for module in modules:
        module.install()


def bootstrap(
    modules: list[Module] = None,
    connectors: list[Connector] = None
):
    """Application entry point: wire connectors and modules into the Registry.

    Connectors are registered BEFORE modules are installed so that a module's
    install() can resolve its connectors from the Registry. (The original
    installed modules first, before their connectors were available.)
    """
    if connectors:
        register_connectors(connectors)
    if modules:
        register_modules(modules)
@@ -0,0 +1 @@
1
+ from .connector import Connector
@@ -0,0 +1,4 @@
class Connector:
    """Marker base class shared by all framework connectors.

    Concrete connectors (SQL, Qdrant, object storage, ...) derive from this
    type so they can be registered and looked up uniformly in the Registry.
    """
@@ -0,0 +1,4 @@
1
+ from .connector import ObjectStorageConnector
2
+ from .config import ObjectStorageConfig
3
+
4
+ __all__ = ['ObjectStorageConnector', 'ObjectStorageConfig']
@@ -0,0 +1,11 @@
from pydantic import BaseModel


class ObjectStorageConfig(BaseModel):
    """Connection settings for an S3-compatible object storage service.

    Fields mirror the arguments handed to ``boto3.client('s3', ...)``.
    """

    endpoint_url: str   # e.g. "https://s3.example.com"
    access_key: str
    secret_key: str
    bucket_name: str    # bucket that all connector operations target
    use_ssl: bool = True
    verify_ssl: bool = True
@@ -0,0 +1,121 @@
import asyncio
import boto3
from botocore.exceptions import ClientError
from typing import Optional, BinaryIO, Dict, Any
import logging
from .config import ObjectStorageConfig
from .. import Connector

logger = logging.getLogger(__name__)


class ObjectStorageConnector(Connector):
    """Connector for S3-compatible object storage operations.

    All public methods are coroutines. boto3 is synchronous, so every network
    call is offloaded with ``asyncio.to_thread`` — the original awaited
    nothing and blocked the event loop for the duration of each S3 request.
    Methods return True/False (or a value/None) instead of raising on
    ``ClientError``; failures are logged.
    """

    def __init__(self, config: ObjectStorageConfig):
        self.config = config
        self.client = self._create_client()
        self.bucket_name = config.bucket_name

    def _create_client(self):
        """Create and return an S3 client from the stored config."""
        return boto3.client(
            's3',
            endpoint_url=self.config.endpoint_url,
            aws_access_key_id=self.config.access_key,
            aws_secret_access_key=self.config.secret_key,
            use_ssl=self.config.use_ssl,
            verify=self.config.verify_ssl
        )

    async def upload_file(self, file_obj: BinaryIO, object_key: str, metadata: Optional[Dict[str, str]] = None) -> bool:
        """Upload a file object under ``object_key``; returns True on success."""
        try:
            extra_args = {'Metadata': metadata} if metadata else {}
            await asyncio.to_thread(
                self.client.upload_fileobj,
                file_obj,
                self.bucket_name,
                object_key,
                ExtraArgs=extra_args
            )
            return True
        except ClientError as e:
            logger.error(f"Error uploading file {object_key}: {str(e)}")
            return False

    async def download_file(self, object_key: str, file_obj: BinaryIO) -> bool:
        """Download ``object_key`` into an open binary file object."""
        try:
            await asyncio.to_thread(
                self.client.download_fileobj,
                self.bucket_name,
                object_key,
                file_obj
            )
            return True
        except ClientError as e:
            logger.error(f"Error downloading file {object_key}: {str(e)}")
            return False

    async def download_to_dir(self, object_key: str, directory: str) -> bool:
        """Download a file into ``directory`` under its object key name.

        NOTE(review): if ``object_key`` contains '/' separators the target
        subdirectories must already exist — confirm callers guarantee this.
        """
        try:
            file_path = f"{directory}/{object_key}"
            with open(file_path, 'wb') as file_obj:
                return await self.download_file(object_key, file_obj)
        except Exception as e:
            logger.error(f"Error downloading file {object_key} to {directory}: {str(e)}")
            return False

    async def delete_file(self, object_key: str) -> bool:
        """Delete ``object_key`` from the bucket; returns True on success."""
        try:
            await asyncio.to_thread(
                self.client.delete_object,
                Bucket=self.bucket_name,
                Key=object_key
            )
            return True
        except ClientError as e:
            logger.error(f"Error deleting file {object_key}: {str(e)}")
            return False

    async def get_file_metadata(self, object_key: str) -> Optional[Dict[str, Any]]:
        """Return selected HEAD metadata for a file, or None on error."""
        try:
            response = await asyncio.to_thread(
                self.client.head_object,
                Bucket=self.bucket_name,
                Key=object_key
            )
            return {
                'content_type': response.get('ContentType'),
                'content_length': response.get('ContentLength'),
                'last_modified': response.get('LastModified'),
                'metadata': response.get('Metadata', {})
            }
        except ClientError as e:
            logger.error(f"Error getting metadata for file {object_key}: {str(e)}")
            return None

    async def list_files(self, prefix: str = "") -> list:
        """List object keys in the bucket matching ``prefix``.

        NOTE(review): list_objects_v2 returns at most 1000 keys per call;
        pagination is not handled here.
        """
        try:
            response = await asyncio.to_thread(
                self.client.list_objects_v2,
                Bucket=self.bucket_name,
                Prefix=prefix
            )
            return [item['Key'] for item in response.get('Contents', [])]
        except ClientError as e:
            logger.error(f"Error listing files with prefix {prefix}: {str(e)}")
            return []

    async def generate_presigned_url(self, object_key: str, expiration: int = 3600) -> Optional[str]:
        """Generate a presigned GET URL valid for ``expiration`` seconds."""
        try:
            return await asyncio.to_thread(
                self.client.generate_presigned_url,
                'get_object',
                Params={
                    'Bucket': self.bucket_name,
                    'Key': object_key
                },
                ExpiresIn=expiration
            )
        except ClientError as e:
            logger.error(f"Error generating presigned URL for {object_key}: {str(e)}")
            return None
@@ -0,0 +1 @@
1
+ from .connector import QdrantConnector
@@ -0,0 +1,20 @@
from qdrant_client import AsyncQdrantClient

# BUG FIX: was `from src.core.connectors import Connector`, which does not
# exist — every other module imports from `src.minix.core.connectors`.
from src.minix.core.connectors import Connector


class QdrantConnector(Connector):
    """Connector managing a lazily created AsyncQdrantClient."""

    def __init__(self, url: str, api_key: str = None):
        self.url = url
        self.api_key = api_key
        self.client = None  # created on connect()

    async def connect(self) -> None:
        """Instantiate the async Qdrant client for the configured URL."""
        self.client = AsyncQdrantClient(url=self.url, api_key=self.api_key)

    async def disconnect(self) -> None:
        # qdrant-client doesn't need explicit disconnect, but close can be added for compatibility
        self.client = None

    async def is_connected(self) -> bool:
        """Report whether connect() has been called (client reference exists)."""
        return self.client is not None
@@ -0,0 +1 @@
1
+ from .sql_connector import SqlConnectorConfig, SqlConnector
@@ -0,0 +1,96 @@
1
+ from sqlalchemy.orm import sessionmaker, scoped_session
2
+ from sqlalchemy import create_engine
3
+
4
+ from src.minix.core.connectors import Connector
5
+
6
+
class SqlConnectorConfig:
    """Plain value object holding SQL connection settings.

    Built either from keyword arguments, or — when ``read_from_dict`` is
    True — from ``config_dict`` (missing keys become None).
    """

    def __init__(
        self,
        username: str = None,
        password: str = None,
        host: str = None,
        port: int = None,
        database: str = None,
        driver: str = None,
        config_dict: dict = None,
        read_from_dict: bool = False
    ):
        if read_from_dict:
            self.read_from_dict(config_dict)
        else:
            self.username = username
            self.password = password
            self.host = host
            self.port = port
            self.database = database
            self.driver = driver

    def read_from_dict(self, config_dict: dict):
        """Populate all attributes from a dict; absent keys become None."""
        self.username = config_dict.get('username')
        self.password = config_dict.get('password')
        self.host = config_dict.get('host')
        self.port = config_dict.get('port')
        self.database = config_dict.get('database')
        self.driver = config_dict.get('driver')

    def to_dict(self) -> dict:
        """Return the settings as a plain dict (includes the raw password)."""
        return {
            'username': self.username,
            'password': self.password,
            'host': self.host,
            'port': self.port,
            'database': self.database,
            'driver': self.driver
        }

    def __str__(self):
        # SECURITY FIX: the original interpolated the plaintext password here,
        # leaking credentials into any log line that stringifies the config.
        masked = '***' if self.password else None
        return f'username: {self.username}, password: {masked}, host: {self.host}, port: {self.port}, database: {self.database}, driver: {self.driver}'
class SqlConnector(Connector):
    """SQLAlchemy-backed connector exposing an engine and scoped sessions."""

    def __init__(
        self,
        sql_connector_config: SqlConnectorConfig
    ):
        self.username = sql_connector_config.username
        self.password = sql_connector_config.password
        self.host = sql_connector_config.host
        self.port = sql_connector_config.port
        self.database = sql_connector_config.database
        self.driver = sql_connector_config.driver
        # SECURITY FIX: removed the original debug print of the connection
        # string, which dumped the DB password to stdout.
        self.engine = create_engine(
            self.get_connection_string(self.driver),
            echo=False,
            pool_pre_ping=True,   # validate pooled connections before use
            pool_recycle=3600     # recycle connections after 1 hour (seconds)
        )
        # Thread-local session factory; get_session() returns the current one.
        self.Session = scoped_session(
            sessionmaker(
                bind=self.engine,
                autocommit=False,
                autoflush=False
            )
        )

    def get_session(self):
        """Return the thread-local Session from the scoped factory."""
        return self.Session()

    def get_engine(self):
        """Return the underlying SQLAlchemy engine."""
        return self.engine

    def get_connection_string(self, driver: str) -> str:
        """Build a SQLAlchemy URL for ``driver`` ('mysql' or 'clickhouse').

        Raises:
            Exception: if the driver is not supported.
        """
        if driver == 'mysql':
            return self.get_mysql_connection_string()
        elif driver == 'clickhouse':
            return self.clickhouse_connection_string()
        else:
            raise Exception('Driver not supported')

    def get_mysql_connection_string(self) -> str:
        """MySQL URL using the pymysql driver."""
        return f'mysql+pymysql://{self.username}:{self.password}@{self.host}:{self.port}/{self.database}'

    def clickhouse_connection_string(self) -> str:
        """ClickHouse URL (requires a clickhouse SQLAlchemy dialect to be installed)."""
        return f'clickhouse://{self.username}:{self.password}@{self.host}:{self.port}/{self.database}'
@@ -0,0 +1 @@
1
+ from .async_consumer import AsyncConsumer, AsyncConsumerConfig
@@ -0,0 +1,83 @@
import asyncio
import threading
import json
from abc import ABC, abstractmethod
from typing import List
from aiokafka import AIOKafkaConsumer
from pydantic import BaseModel


class AsyncConsumerConfig(BaseModel):
    """Kafka consumer settings.

    ``topics`` and ``name`` are framework fields; the remaining fields are
    forwarded as AIOKafkaConsumer keyword arguments.
    """
    topics: List[str]
    group_id: str
    bootstrap_servers: List[str] | None = None
    name: str


class AsyncConsumer(ABC):
    """Abstract Kafka consumer running its own asyncio loop in a daemon thread."""

    def __init__(self):
        self._thread = None
        self._loop = None
        self._task = None
        self._consumer = None
        self._running = False

    @abstractmethod
    def get_config(self) -> AsyncConsumerConfig:
        """Return Kafka config including topics, bootstrap_servers, group_id"""
        pass

    @abstractmethod
    async def run(self, message: dict):
        """Process a single Kafka message."""
        pass

    async def _consume(self):
        config = self.get_config()
        # BUG FIX: the original called config.pop("topic") — pydantic models
        # have no .pop(), and the field is 'topics' (a list). It also splatted
        # model_dump() wholesale, passing 'name'/'topics' kwargs that
        # AIOKafkaConsumer rejects. Dump to a dict, split out the topics, and
        # drop framework-only fields before constructing the consumer.
        params = config.model_dump(exclude_none=True)
        topics = params.pop("topics")
        params.pop("name", None)  # framework metadata, not a Kafka option

        self._consumer = AIOKafkaConsumer(
            *topics,
            **params,
            value_deserializer=lambda m: json.loads(m.decode("utf-8"))
        )

        await self._consumer.start()
        self._running = True
        print(f"[{self.__class__.__name__}] Started on topics {topics}")

        try:
            async for msg in self._consumer:
                await self.run(msg.value)
        except asyncio.CancelledError:
            print(f"[{self.__class__.__name__}] Cancelled")
        except Exception as e:
            print(f"[{self.__class__.__name__}] Error: {e}")
        finally:
            await self._consumer.stop()
            self._running = False
            print(f"[{self.__class__.__name__}] Stopped")

    def start_in_thread(self):
        """Spawn a daemon thread that owns a fresh event loop and runs _consume()."""
        def thread_target():
            try:
                self._loop = asyncio.new_event_loop()
                asyncio.set_event_loop(self._loop)
                self._task = self._loop.create_task(self._consume())
                self._loop.run_until_complete(self._task)
            except Exception as e:
                print(f"[{self.__class__.__name__}] Fatal thread error: {e}")
            finally:
                self._loop.close()

        self._thread = threading.Thread(target=thread_target, daemon=True)
        self._thread.start()

    def stop(self):
        """Cancel the consume task from outside the loop's thread and join it."""
        if self._loop and self._task and not self._task.done():
            def shutdown():
                self._task.cancel()

            # cancel() must run on the loop's own thread.
            self._loop.call_soon_threadsafe(shutdown)
            self._thread.join(timeout=10)
@@ -0,0 +1 @@
1
+ from .controller import Controller
@@ -0,0 +1,32 @@
from abc import abstractmethod
from typing import List
from fastapi import APIRouter


class Controller:
    """Base class for HTTP controllers: owns an APIRouter and registers routes.

    NOTE(review): @abstractmethod has no enforcement effect here because
    Controller does not use ABCMeta; subclasses that fail to override still
    instantiate and then raise NotImplementedError from __init__ — confirm
    this is intentional before converting to abc.ABC (which would raise
    TypeError instead).
    """

    def __init__(self, tags: List[str] = None):
        # Subclass hooks run during construction: the route prefix is read
        # first, then define_routes() attaches endpoints to self.router.
        self.router = APIRouter(prefix= self.get_prefix(), tags=tags)
        self.define_routes()

    @property
    def get_router(self):
        # Accessed as an attribute (controller.get_router) despite the
        # getter-style name; returns the router built in __init__.
        return self.router

    @abstractmethod
    def get_prefix(self):
        """
        Returns the prefix for the controller's routes.
        This method should be implemented by subclasses.
        """
        raise NotImplementedError("Subclasses must implement this method.")

    @abstractmethod
    def define_routes(self):
        """
        Defines the routes for the controller.
        This method should be implemented by subclasses.
        """
        raise NotImplementedError("Subclasses must implement this method.")
@@ -0,0 +1,4 @@
1
+ from .entity import Entity
2
+ from .sql_entity import SqlEntity
3
+ from .redis_entity import RedisEntity
4
+ from .qdrant_entity import QdrantEntity
@@ -0,0 +1,4 @@
class Entity:
    """Common marker base class for all persistence entities.

    SQL, Redis and Qdrant entity bases all derive from this type so that
    repositories and services can share a single entity abstraction.
    """
@@ -0,0 +1,26 @@
from abc import abstractmethod
from datetime import datetime
from typing import List, Dict, Any

from pydantic import BaseModel
from src.minix.core.entity import Entity


class QdrantEntity(Entity, BaseModel):
    """Base pydantic model for records stored in a Qdrant collection."""

    id: str                 # point id used in Qdrant
    created_at: datetime    # creation timestamp carried in the payload
    vector: List[float]     # embedding vector for the point

    @property
    def payload(self) -> Dict[str, Any]:
        # NOTE(review): vars(self) returns the instance __dict__, so the
        # payload includes id, created_at AND the full vector — confirm the
        # vector is really meant to be duplicated into the Qdrant payload.
        return vars(self)

    @staticmethod
    @abstractmethod
    def collection()-> str:
        """
        Returns the name of the Qdrant collection this entity belongs to.
        """
        pass
@@ -0,0 +1,4 @@
from src.minix.core.entity import Entity


class RedisEntity(Entity):
    """Marker base class for entities persisted in Redis."""
@@ -0,0 +1,25 @@
from datetime import datetime

from sqlalchemy import Integer, DateTime, func
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, MappedAsDataclass
from src.minix.core.entity import Entity


class Base(DeclarativeBase):
    """Declarative base shared by all SQL entities."""
    pass


class SqlEntity(Base, Entity):
    """Abstract base row: integer primary key plus created/updated timestamps.

    Subclasses define __tablename__ and their own columns; no table is
    emitted for this class itself (__abstract__).
    """
    __abstract__ = True
    # Surrogate integer primary key.
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # func.now() is a SQL expression: both timestamps are assigned by the
    # database, not by Python, and are naive (no timezone) as declared.
    created_at : Mapped[datetime] = mapped_column(DateTime, nullable=False, default=func.now(), index=True)
    updated_at : Mapped[datetime] = mapped_column(DateTime, nullable=False, default=func.now(), onupdate=func.now(), index=True)

    def __repr__(self):
        return f'<{self.__class__.__name__} {self.id}>'
@@ -0,0 +1 @@
1
+ from .installable import Installable
@@ -0,0 +1,11 @@
from abc import ABC, abstractmethod


class Installable(ABC):
    """Interface for components that perform a one-time installation step."""

    @abstractmethod
    def install(self):
        """Carry out the installation; concrete subclasses must override."""
@@ -0,0 +1,4 @@
1
+ from .model import Model
2
+ from .mlflow_model import MlflowModel
3
+ from .model_registry import ModelRegistry
4
+ from .embedding_model import EmbeddingModel
@@ -0,0 +1,20 @@
from abc import ABC, abstractmethod
import numpy as np
from src.minix.core.model import Model


class EmbeddingModel(Model, ABC):
    """Abstract model whose prediction is a text embedding vector."""

    def predict(self, text: str):
        """Run the model on *text* by delegating to :meth:`embed`."""
        vector = self.embed(text)
        return vector

    @abstractmethod
    def embed(self, text: str) -> np.ndarray:
        """Return the embedding vector for a single text."""
        raise NotImplementedError("The embed method must be implemented by subclasses.")

    @abstractmethod
    def embed_batch(self, texts: list[str]) -> np.ndarray:
        """Return the embedding vectors for a batch of texts."""
        raise NotImplementedError("The embed_batch method must be implemented by subclasses.")