matrx-orm 1.0.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrx_orm/__init__.py +16 -0
- matrx_orm/adapters/__init__.py +0 -0
- matrx_orm/adapters/base_adapter.py +69 -0
- matrx_orm/adapters/postgresql.py +171 -0
- matrx_orm/client/__init__.py +1 -0
- matrx_orm/client/postgres_connection.py +144 -0
- matrx_orm/constants.py +141 -0
- matrx_orm/core/__init__.py +0 -0
- matrx_orm/core/async_db_manager.py +222 -0
- matrx_orm/core/base.py +663 -0
- matrx_orm/core/config.py +366 -0
- matrx_orm/core/expressions.py +22 -0
- matrx_orm/core/extended.py +1205 -0
- matrx_orm/core/fields.py +850 -0
- matrx_orm/core/registry.py +43 -0
- matrx_orm/core/relations.py +260 -0
- matrx_orm/error_handling.py +46 -0
- matrx_orm/exceptions.py +256 -0
- matrx_orm/extended/__init__.py +0 -0
- matrx_orm/extended/app_error_handler.py +101 -0
- matrx_orm/middleware/__init__.py +0 -0
- matrx_orm/middleware/base.py +249 -0
- matrx_orm/operations/__init__.py +0 -0
- matrx_orm/operations/create.py +118 -0
- matrx_orm/operations/delete.py +59 -0
- matrx_orm/operations/read.py +59 -0
- matrx_orm/operations/update.py +165 -0
- matrx_orm/python_sql/__init__.py +0 -0
- matrx_orm/python_sql/db_objects.py +389 -0
- matrx_orm/python_sql/table_detailed_relationships.py +498 -0
- matrx_orm/python_sql/table_typescript_relationship.py +180 -0
- matrx_orm/query/__init__.py +0 -0
- matrx_orm/query/builder.py +249 -0
- matrx_orm/query/executor.py +376 -0
- matrx_orm/schema_builder/__init__.py +1 -0
- matrx_orm/schema_builder/generator.py +149 -0
- matrx_orm/schema_builder/helpers/__init__.py +0 -0
- matrx_orm/schema_builder/helpers/configs.py +0 -0
- matrx_orm/schema_builder/helpers/git_checker.py +118 -0
- matrx_orm/schema_builder/helpers/manager_dto_creator.py +435 -0
- matrx_orm/schema_builder/helpers/manager_helpers.py +22 -0
- matrx_orm/schema_builder/helpers/manual_overrides.py +144 -0
- matrx_orm/schema_builder/individual_managers/__init__.py +0 -0
- matrx_orm/schema_builder/individual_managers/columns.py +1195 -0
- matrx_orm/schema_builder/individual_managers/common.py +16 -0
- matrx_orm/schema_builder/individual_managers/relationships.py +57 -0
- matrx_orm/schema_builder/individual_managers/schema.py +722 -0
- matrx_orm/schema_builder/individual_managers/tables.py +1116 -0
- matrx_orm/schema_builder/individual_managers/views.py +94 -0
- matrx_orm/schema_builder/parts_generators/__init__.py +0 -0
- matrx_orm/schema_builder/parts_generators/entity_field_override_generator.py +137 -0
- matrx_orm/schema_builder/parts_generators/entity_main_hook_generator.py +242 -0
- matrx_orm/schema_builder/parts_generators/entity_override_generator.py +51 -0
- matrx_orm/schema_builder/schema_manager.py +770 -0
- matrx_orm/sql_executor/__init__.py +20 -0
- matrx_orm/sql_executor/executor.py +164 -0
- matrx_orm/sql_executor/queries.py +118 -0
- matrx_orm/sql_executor/registry.py +52 -0
- matrx_orm/sql_executor/types.py +18 -0
- matrx_orm/sql_executor/utils.py +90 -0
- matrx_orm/state.py +466 -0
- matrx_orm/structure.md +110 -0
- matrx_orm/utils/__init__.py +0 -0
- matrx_orm/utils/sql_utils.py +57 -0
- matrx_orm/utils/type_converters.py +101 -0
- matrx_orm-1.0.4.dist-info/METADATA +93 -0
- matrx_orm-1.0.4.dist-info/RECORD +69 -0
- matrx_orm-1.0.4.dist-info/WHEEL +4 -0
- matrx_orm-1.0.4.dist-info/entry_points.txt +2 -0
matrx_orm/__init__.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
"""Public API for matrx_orm.

Re-exports configuration helpers, the base model/manager classes and all
field types so callers can ``from matrx_orm import Model, CharField, ...``.
"""

from .core.config import (
    DatabaseProjectConfig,
    register_database,
    get_database_config,
    get_connection_string,
    get_manager_config,
    get_code_config,
    get_all_database_project_names,
    get_database_alias,
    get_all_database_projects_redacted,
)
from .core.extended import BaseManager, BaseDTO
from .core.base import Model
from .core.registry import model_registry
from .core.fields import (
    CharField, EnumField, DateField, TextField, IntegerField, FloatField,
    BooleanField, DateTimeField, UUIDField, JSONField, DecimalField,
    BigIntegerField, SmallIntegerField, JSONBField, UUIDArrayField,
    JSONBArrayField, ForeignKey,
)

# NOTE(review): the original __all__ listed "get_default_code_config", which
# was never imported above, so ``from matrx_orm import *`` raised
# AttributeError.  It has been dropped; re-add it together with an import from
# .core.config if that symbol actually exists there.
__all__ = [
    "DatabaseProjectConfig", "register_database", "get_database_config",
    "get_connection_string", "get_manager_config", "get_code_config",
    "get_all_database_project_names", "BaseManager", "BaseDTO", "Model",
    "model_registry", "CharField", "EnumField", "DateField", "TextField",
    "IntegerField", "FloatField", "BooleanField", "DateTimeField",
    "UUIDField", "JSONField", "DecimalField", "BigIntegerField",
    "SmallIntegerField", "JSONBField", "UUIDArrayField", "JSONBArrayField",
    "ForeignKey", "get_database_alias", "get_all_database_projects_redacted",
]
|
|
File without changes
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
from typing import Dict, Any, List, Union
|
|
3
|
+
from types import TracebackType
|
|
4
|
+
from typing import Type, Optional
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class BaseAdapter(ABC):
    """Abstract async database adapter.

    Concrete adapters implement execution against a specific backend.  The
    ``query`` arguments are dicts produced by the query layer (keys such as
    ``model``, ``filters`` and ``data`` — see the PostgreSQL adapter).
    Instances are async context managers; leaving the ``async with`` block
    closes the underlying connection via :meth:`close`.
    """

    @abstractmethod
    async def execute_query(self, query: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Run the SELECT described by *query*; return all rows as dicts."""

    @abstractmethod
    async def fetch(self, query: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Return a single row matching *query*, or ``None``."""

    @abstractmethod
    async def fetch_by_id(self, model: Any, record_id: Union[str, int]) -> Optional[Dict[str, Any]]:
        """Return the row of *model* with primary key *record_id*, or ``None``."""

    @abstractmethod
    async def count(self, query: Dict[str, Any]) -> int:
        """Return the number of rows matching *query*."""

    @abstractmethod
    async def exists(self, query: Dict[str, Any]) -> bool:
        """Return ``True`` when at least one row matches *query*."""

    @abstractmethod
    async def insert(self, query: Dict[str, Any]) -> Dict[str, Any]:
        """Insert a single row and return the stored row."""

    @abstractmethod
    async def bulk_insert(self, query: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Insert many rows and return the stored rows."""

    @abstractmethod
    async def update(self, query: Dict[str, Any], data: Dict[str, Any]) -> int:
        """Update rows matching *query* with *data*; return the affected count."""

    @abstractmethod
    async def bulk_update(self, query: Dict[str, Any]) -> int:
        """Apply the bulk update described by *query*; return the affected count."""

    @abstractmethod
    async def delete(self, query: Dict[str, Any]) -> int:
        """Delete rows matching *query*; return the affected count."""

    @abstractmethod
    async def raw_sql(self, sql: str, params: Optional[List[Any]] = None) -> Union[List[Dict[str, Any]], int]:
        """Execute raw SQL; SELECTs return rows, other statements a count.

        Fixed annotation: *params* is optional, so it is ``Optional[List[Any]]``
        rather than the misleading ``List[Any] = None``.
        """

    @abstractmethod
    async def transaction(self):
        """Yield control inside a backend transaction."""

    @abstractmethod
    async def close(self):
        """Release the underlying connection/resources."""

    async def __aenter__(self) -> "BaseAdapter":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Always close, whether or not an exception is propagating.
        await self.close()
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
from typing import Dict, Any, List, Union, Tuple
|
|
2
|
+
import asyncpg
|
|
3
|
+
from matrx_orm.adapters.base_adapter import BaseAdapter
|
|
4
|
+
from matrx_orm import get_database_config
|
|
5
|
+
from matrx_utils.conf import settings
|
|
6
|
+
|
|
7
|
+
class PostgreSQLAdapter(BaseAdapter):
    """asyncpg implementation of :class:`BaseAdapter`.

    A single connection is opened lazily and reused.  Values are always bound
    as ``$n`` parameters; table and column names are interpolated directly
    into the SQL, so they must come from trusted model definitions — never
    from user input.
    """

    def __init__(self):
        # NOTE(review): get_database_config() is called without a project name
        # here but with config_name elsewhere in this package — confirm the
        # intended default resolution.
        self.config = get_database_config()
        self.connection = None
        self.current_database = settings.DEFAULT_DB_PROJECT

    async def _get_connection(self):
        """Return the live connection, (re)connecting when closed or absent."""
        if self.connection is None or self.connection.is_closed():
            self.connection = await asyncpg.connect(
                host=self.config.host,
                port=self.config.port,
                database=self.config.name,
                user=self.config.user,
                password=self.config.password,
            )
        return self.connection

    @staticmethod
    def _rowcount_from_status(status: str) -> int:
        """Parse the affected-row count out of an asyncpg command status tag.

        ``Connection.execute`` returns strings such as ``"UPDATE 3"`` or
        ``"DELETE 0"``; the trailing token is the count.
        """
        try:
            return int(str(status).rsplit(" ", 1)[-1])
        except ValueError:
            return 0

    async def execute_query(self, query: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Run the SELECT described by *query*; return all rows as dicts."""
        sql, params = self._build_sql(query)
        connection = await self._get_connection()
        rows = await connection.fetch(sql, *params)
        return [dict(row) for row in rows]

    async def count(self, query: Dict[str, Any]) -> int:
        """Return the number of rows matching *query*."""
        sql, params = self._build_count_sql(query)
        connection = await self._get_connection()
        row = await connection.fetchrow(sql, *params)
        return row["count"]

    async def exists(self, query: Dict[str, Any]) -> bool:
        """Return ``True`` when at least one row matches *query*."""
        sql, params = self._build_exists_sql(query)
        connection = await self._get_connection()
        row = await connection.fetchrow(sql, *params)
        return row["exists"]

    async def insert(self, query: Dict[str, Any]) -> Dict[str, Any]:
        """Insert one row and return it (``INSERT ... RETURNING *``)."""
        sql, params = self._build_insert_sql(query)
        connection = await self._get_connection()
        row = await connection.fetchrow(sql, *params)
        return dict(row)

    async def bulk_insert(self, query: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Insert every row in ``query["data"]``; return the stored rows.

        Bug fix: the original ran ``executemany`` and then *re-executed* the
        same INSERT via ``fetch(sql, *params)`` with mis-shaped arguments,
        duplicating the inserts and then crashing.  Each row is now inserted
        exactly once with ``RETURNING *`` inside a single transaction.
        """
        sql, params = self._build_bulk_insert_sql(query)
        connection = await self._get_connection()
        inserted: List[Dict[str, Any]] = []
        async with connection.transaction():
            for row_values in params:
                row = await connection.fetchrow(sql, *row_values)
                inserted.append(dict(row))
        return inserted

    async def update(self, query: Dict[str, Any], data: Dict[str, Any]) -> int:
        """Update matching rows with *data*; return the affected-row count.

        Bug fix: the original returned asyncpg's status string (e.g.
        ``"UPDATE 3"``) despite the declared ``int`` return type.
        """
        sql, params = self._build_update_sql(query, data)
        connection = await self._get_connection()
        status = await connection.execute(sql, *params)
        return self._rowcount_from_status(status)

    async def bulk_update(self, query: Dict[str, Any]) -> int:
        """Bulk update; currently unimplemented (the builder below raises)."""
        sql, params = self._build_bulk_update_sql(query)
        connection = await self._get_connection()
        result = await connection.executemany(sql, params)
        return result

    async def delete(self, query: Dict[str, Any]) -> int:
        """Delete matching rows; return the affected-row count (see update)."""
        sql, params = self._build_delete_sql(query)
        connection = await self._get_connection()
        status = await connection.execute(sql, *params)
        return self._rowcount_from_status(status)

    async def raw_sql(self, sql: str, params: Union[List[Any], None] = None) -> Union[List[Dict[str, Any]], int]:
        """Execute raw SQL; SELECTs return rows, other statements a count.

        Bug fix: a ``None`` *params* previously crashed on ``*params``
        unpacking; it now defaults to an empty argument list.
        """
        params = params or []
        connection = await self._get_connection()
        if sql.strip().upper().startswith("SELECT"):
            rows = await connection.fetch(sql, *params)
            return [dict(row) for row in rows]
        # Keep parity with update/delete: surface an integer row count.
        return self._rowcount_from_status(await connection.execute(sql, *params))

    async def close(self):
        """Close the underlying connection if it is open."""
        if self.connection is not None and not self.connection.is_closed():
            await self.connection.close()

    async def transaction(self):
        # NOTE(review): this is an async *generator* (usable via ``async for``),
        # not an async context manager; ``async with adapter.transaction()``
        # would fail.  Confirm call sites before changing its shape (e.g. to
        # contextlib.asynccontextmanager).
        connection = await self._get_connection()
        async with connection.transaction():
            yield

    async def savepoint(self, name: str):
        """Create a savepoint; *name* must be a trusted SQL identifier."""
        connection = await self._get_connection()
        await connection.execute(f"SAVEPOINT {name}")

    async def rollback_to_savepoint(self, name: str):
        """Roll back to a previously created savepoint."""
        connection = await self._get_connection()
        await connection.execute(f"ROLLBACK TO SAVEPOINT {name}")

    async def release_savepoint(self, name: str):
        """Release (discard) a previously created savepoint."""
        connection = await self._get_connection()
        await connection.execute(f"RELEASE SAVEPOINT {name}")

    @staticmethod
    def _where_clause(filters: Dict[str, Any], start: int = 1) -> Tuple[str, List[Any]]:
        """Render ``field = $n AND ...`` for *filters*; empty string when none."""
        parts: List[str] = []
        params: List[Any] = []
        for offset, (field, value) in enumerate(filters.items()):
            parts.append(f"{field} = ${start + offset}")
            params.append(value)
        return " AND ".join(parts), params

    def _build_sql(self, query: Dict[str, Any]) -> Tuple[str, List[Any]]:
        """SELECT * with an optional WHERE clause.

        Bug fix: the original emitted a dangling ``WHERE`` (invalid SQL)
        whenever ``query["filters"]`` was empty; all builders below share
        the fix via :meth:`_where_clause`.
        """
        clause, params = self._where_clause(query["filters"])
        sql = f"SELECT * FROM {query['model'].__tablename__}"
        if clause:
            sql += f" WHERE {clause}"
        return sql, params

    def _build_count_sql(self, query: Dict[str, Any]) -> Tuple[str, List[Any]]:
        """SELECT COUNT(*) with an optional WHERE clause."""
        clause, params = self._where_clause(query["filters"])
        sql = f"SELECT COUNT(*) FROM {query['model'].__tablename__}"
        if clause:
            sql += f" WHERE {clause}"
        return sql, params

    def _build_exists_sql(self, query: Dict[str, Any]) -> Tuple[str, List[Any]]:
        """SELECT EXISTS(...) with an optional WHERE clause."""
        clause, params = self._where_clause(query["filters"])
        inner = f"SELECT 1 FROM {query['model'].__tablename__}"
        if clause:
            inner += f" WHERE {clause}"
        return f"SELECT EXISTS({inner})", params

    def _build_insert_sql(self, query: Dict[str, Any]) -> Tuple[str, List[Any]]:
        """Single-row INSERT returning the stored row."""
        fields = ", ".join(query["data"].keys())
        placeholders = ", ".join([f"${i + 1}" for i in range(len(query["data"]))])
        sql = f"INSERT INTO {query['model'].__tablename__} ({fields}) VALUES ({placeholders}) RETURNING *"
        return sql, list(query["data"].values())

    def _build_bulk_insert_sql(self, query: Dict[str, Any]) -> Tuple[str, List[Any]]:
        """Per-row INSERT statement plus one value-tuple per row.

        ``RETURNING *`` added so :meth:`bulk_insert` can report the stored
        rows.  Assumes every row dict shares the first row's keys and order.
        """
        first = query["data"][0]
        fields = ", ".join(first.keys())
        placeholders = ", ".join([f"${i + 1}" for i in range(len(first))])
        sql = f"INSERT INTO {query['model'].__tablename__} ({fields}) VALUES ({placeholders}) RETURNING *"
        params = [tuple(row.values()) for row in query["data"]]
        return sql, params

    def _build_update_sql(self, query: Dict[str, Any], data: Dict[str, Any]) -> Tuple[str, List[Any]]:
        """UPDATE ... SET ... with an optional WHERE clause.

        Filter placeholders are numbered after the SET placeholders.
        """
        set_clause = ", ".join([f"{key} = ${i + 1}" for i, key in enumerate(data.keys())])
        clause, filter_params = self._where_clause(query["filters"], start=len(data) + 1)
        sql = f"UPDATE {query['model'].__tablename__} SET {set_clause}"
        if clause:
            sql += f" WHERE {clause}"
        return sql, list(data.values()) + filter_params

    def _build_bulk_update_sql(self, query: Dict[str, Any]) -> Tuple[str, List[Any]]:
        # Similar to update but for bulk updates
        raise NotImplementedError("Bulk update logic to be implemented")

    def _build_delete_sql(self, query: Dict[str, Any]) -> Tuple[str, List[Any]]:
        """DELETE with an optional WHERE clause."""
        clause, params = self._where_clause(query["filters"])
        sql = f"DELETE FROM {query['model'].__tablename__}"
        if clause:
            sql += f" WHERE {clause}"
        return sql, params
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .postgres_connection import get_postgres_connection
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
from typing import Any, Dict, List
|
|
4
|
+
|
|
5
|
+
from psycopg2 import pool
|
|
6
|
+
from psycopg2.extras import RealDictCursor
|
|
7
|
+
from matrx_utils import vcprint
|
|
8
|
+
|
|
9
|
+
from matrx_orm.utils.sql_utils import sql_param_to_psycopg2
|
|
10
|
+
from matrx_orm import get_database_config
|
|
11
|
+
|
|
12
|
+
connection_pools = {}
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def init_connection_details(config_name):
    """Lazily create a psycopg2 connection pool for *config_name*.

    Pools are cached in the module-level ``connection_pools`` dict (one per
    database project), so repeat calls are no-ops.

    Raises:
        ValueError: if any required connection setting is missing.
    """
    global connection_pools

    if config_name not in connection_pools:
        config = get_database_config(config_name=config_name)
        vcprint(f"\n[Matrx ORM] Using configuration for: {config_name}\n", color="green")

        db_host = config.get("host")
        db_port = config.get("port")
        db_name = config.get("database_name")
        db_user = config.get("user")
        db_password = config.get("password")

        if not all([db_host, db_port, db_name, db_user, db_password]):
            raise ValueError(
                f"Incomplete database configuration for '{config_name}'. " "Please check your environment variables or settings.")

        connection_string = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"

        # Security fix: the original logged the full DSN including the
        # plaintext password.  Log a redacted copy instead.
        redacted = f"postgresql://{db_user}:****@{db_host}:{db_port}/{db_name}"
        vcprint(f"\n[Matrx ORM] Connection String:\n{redacted}\n", color="green")

        connection_pools[config_name] = pool.SimpleConnectionPool(1, 10, dsn=connection_string, sslmode="require")
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def get_postgres_connection(
    database_project="this_will_cause_error_specify_the_database",
):
    """Check out a pooled psycopg2 connection for *database_project*.

    The pool is created on first use.  The default argument is a deliberate
    sentinel so that callers who forget to name a project fail loudly.  The
    caller is responsible for returning the connection via
    ``connection_pools[database_project].putconn(conn)``.
    """
    init_connection_details(database_project)
    return connection_pools[database_project].getconn()
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def execute_sql_query(query, params=None, database_project="this_will_cause_error_specify_the_database"):
    """
    Executes a SQL query and returns the results as a list of dictionaries.

    Dict-style named parameters are translated to psycopg2 placeholders
    before execution; the pooled connection is always returned to the pool,
    even when the query raises.
    """
    conn = get_postgres_connection(database_project)
    try:
        with conn.cursor(cursor_factory=RealDictCursor) as cursor:
            if params and isinstance(params, dict):
                query, params = sql_param_to_psycopg2(query, params)
            cursor.execute(query, params)
            return cursor.fetchall()
    finally:
        connection_pools[database_project].putconn(conn)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def execute_sql_file(filename, params=None, database_project="this_will_cause_error_specify_the_database"):
    """
    Executes a SQL query from a file and returns the results.

    The file is looked up inside the package-local ``sql/`` directory; the
    actual execution is delegated to :func:`execute_sql_query`.
    """
    sql_path = os.path.join(os.path.dirname(__file__), "sql", filename)
    with open(sql_path, "r") as sql_file:
        query = sql_file.read()

    # Translate named parameters up front so the logged query is final.
    if params:
        query, params = sql_param_to_psycopg2(query, params)

    vcprint(f"Executing query:\n{query}\n", color="green")
    vcprint(f"With params: {params}\n", color="green")

    return execute_sql_query(query, params, database_project)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def execute_transaction_query(query, params=None, database_project="this_will_cause_error_specify_the_database"):
    """
    Executes a SQL query within a transaction, commits it, and returns any results.
    Suitable for INSERT/UPDATE/DELETE operations that may or may not return values.

    Fixes: the bare ``except:`` (which swallowed even KeyboardInterrupt) is
    narrowed to psycopg2's ProgrammingError, and a failed statement is rolled
    back so the connection is never returned to the pool mid-transaction.
    """
    # Local import: the module only binds `pool` and `RealDictCursor` at top level.
    from psycopg2 import ProgrammingError

    conn = get_postgres_connection(database_project)
    try:
        with conn.cursor(cursor_factory=RealDictCursor) as cur:
            if params and isinstance(params, dict):
                query, params = sql_param_to_psycopg2(query, params)
            cur.execute(query, params)
            conn.commit()  # Explicitly commit the transaction

            try:
                return cur.fetchall()
            except ProgrammingError:
                # Statement produced no result set (plain INSERT/UPDATE/DELETE).
                return []
    except Exception:
        # Roll back so the pooled connection is clean for the next caller.
        conn.rollback()
        raise
    finally:
        connection_pools[database_project].putconn(conn)
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def execute_batch_query(query: str, batch_params: List[Dict[str, Any]], batch_size: int = 50,
                        database_project="supabase_automation_matrix"):
    """
    Executes a SQL query once per parameter dict, processed in batches.

    Each row is committed individually; rows whose statement returns a result
    set have those rows accumulated into the return value.

    NOTE(review): unlike the sibling helpers, the default *database_project*
    is a hard-coded project name — kept for backward compatibility, confirm
    whether callers rely on it.

    Fixes: the bare ``except:`` is narrowed to psycopg2's ProgrammingError,
    and any failure rolls back before the connection returns to the pool.
    """
    from psycopg2 import ProgrammingError  # module only binds pool/RealDictCursor

    conn = get_postgres_connection(database_project)
    all_results = []

    try:
        total_batches = (len(batch_params) + batch_size - 1) // batch_size
        for i in range(0, len(batch_params), batch_size):
            batch = batch_params[i: i + batch_size]
            vcprint(f"Processing batch {i // batch_size + 1}/{total_batches}",
                    color="blue")

            # Process each row individually within the batch.
            for row_params in batch:
                # JSONB serialization: only the "data" key is converted when it
                # is a dict (matches the original behavior).
                processed_params = {
                    key: json.dumps(value) if key == "data" and isinstance(value, dict) else value
                    for key, value in row_params.items()
                }

                with conn.cursor(cursor_factory=RealDictCursor) as cur:
                    if processed_params:
                        query_with_names, params = sql_param_to_psycopg2(query, processed_params)
                        cur.execute(query_with_names, params)
                        conn.commit()
                        try:
                            result = cur.fetchall()
                            if result:
                                all_results.extend(result)
                        except ProgrammingError:
                            # No result set for this statement.
                            pass
    except Exception:
        # Roll back so the pooled connection is clean for the next caller.
        conn.rollback()
        raise
    finally:
        connection_pools[database_project].putconn(conn)

    return all_results
|
matrx_orm/constants.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
def get_default_component_props():
    """Return the baseline UI component-prop map.

    Every known prop starts out as the literal string ``"default"``;
    downstream schema generators override individual entries as needed.
    """
    prop_names = (
        "subComponent",
        "variant",
        "section",
        "placeholder",
        "size",
        "textSize",
        "textColor",
        "rows",
        "animation",
        "fullWidthValue",
        "fullWidth",
        "disabled",
        "className",
        "type",
        "onChange",
        "onBlur",
        "formatString",
        "min",
        "max",
        "step",
        "numberType",
        "options",
    )
    return {name: "default" for name in prop_names}
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
# Method to generate the AutomationSchema
def generate_automation_schema():  # TODO: Currently not used.
    """Return the TypeScript ``AutomationSchema`` type declaration as a string.

    The declaration maps every automation table name to its entity name
    formats, field metadata, fetch strategy and relationship list.  It
    references TS types (``AutomationTableName``, ``DataType``, ``TypeBrand``)
    expected to exist in the consuming frontend codebase.

    NOTE(review): the leading whitespace inside these string literals appears
    to have been collapsed by extraction — confirm the intended TS indentation
    against the generated output before relying on it cosmetically.
    """
    lines = [
        "export type AutomationSchema = {",
        " [tableName in AutomationTableName]: {",
        " entityNameFormats: {",
        " frontend: string;",
        " backend: string;",
        " database: string;",
        " pretty: string;",
        " component: string;",
        " kebab: string;",
        " [key: string]: string;",
        " };",
        " schemaType: 'table' | 'view' | 'dynamic' | 'other';",
        " entityFields: {",
        " [fieldName: string]: {",
        " fieldNameFormats: {",
        " frontend: string;",
        " backend: string;",
        " database: string;",
        " pretty: string;",
        " component: string;",
        " kebab: string;",
        " [key: string]: string;",
        " };",
        " dataType: DataType;",
        " isRequired?: boolean;",
        " maxLength?: number | null;",
        " isArray?: boolean;",
        " defaultValue?: any;",
        " isPrimaryKey?: boolean;",
        " defaultGeneratorFunction?: string | null;",
        " validationFunctions?: string[];",
        " exclusionRules?: string[];",
        " defaultComponent?: string;",
        " structure: 'single' | 'array' | 'object' | 'foreignKey' | 'inverseForeignKey' | 'manyToMany';",
        " isNative: boolean;",
        " typeReference: TypeBrand<any>;",
        " databaseTable: string;",
        " };",
        " };",
        " defaultFetchStrategy: 'simple' | 'fk' | 'ifk' | 'm2m' | 'fkAndIfk' | 'm2mAndFk' | 'm2mAndIfk' | 'fkIfkAndM2M';",
        " relationships: Array<{",
        " relationshipType: 'foreignKey' | 'inverseForeignKey' | 'manyToMany';",
        " column: string;",
        " relatedTable: string;",
        " relatedColumn: string;",
        " junctionTable: string | null;",
        " }>;",
        " };",
        "};",
    ]

    return "\n".join(lines)
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def get_relationship_data_model_types():
    """Return the TypeScript relationship data-model type declarations.

    Emits ``EntityRelationshipType``, ``ForeignKeyDetails``,
    ``ReferencedByDetails``, ``RelationshipDetails`` and
    ``FullEntityRelationships`` plus the ``asEntityRelationships`` cast
    helper, importing ``AnyEntityDatabaseTable`` / ``EntityKeys`` from the
    frontend's ``@/types`` module.

    NOTE(review): indentation inside this literal appears to have been lost
    during extraction — confirm against the originally generated TS file.
    """
    ts_code_content = """

import { AnyEntityDatabaseTable, EntityKeys } from "@/types";

export type EntityRelationshipType =
| "self-referential"
| "one-to-one"
| "one-to-many"
| "many-to-one"
| "many-to-many";

export type ForeignKeyDetails = {
foreignTable: AnyEntityDatabaseTable;
foreignEntity: EntityKeys;
column: string;
fieldName: string;
foreignField: string;
foreignColumn: string;
relationshipType: EntityRelationshipType;
constraintName: string;
};

export type ReferencedByDetails = {
foreignTable: AnyEntityDatabaseTable;
foreignEntity: EntityKeys;
field: string;
column: string;
foreignField: string;
foreignColumn: string;
constraintName: string;
};

export type RelationshipDetails = {
entityName: EntityKeys;
tableName: AnyEntityDatabaseTable;
foreignKeys: Partial<Record<EntityKeys, ForeignKeyDetails>> | Record<string, never>;
referencedBy: Partial<Record<EntityKeys, ReferencedByDetails>> | Record<string, never>;
};

export type FullEntityRelationships = {
selfReferential: EntityKeys[];
manyToMany: EntityKeys[];
oneToOne: EntityKeys[];
manyToOne: EntityKeys[];
oneToMany: EntityKeys[];
undefined: EntityKeys[];
inverseReferences: EntityKeys[];
relationshipDetails: RelationshipDetails;
};

export const asEntityRelationships = (data: any): Record<EntityKeys, FullEntityRelationships> => {
return data as Record<EntityKeys, FullEntityRelationships>;
};

"""
    return ts_code_content
|
|
File without changes
|