sqloader 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqloader-0.1.0/LICENSE +21 -0
- sqloader-0.1.0/PKG-INFO +92 -0
- sqloader-0.1.0/README.md +69 -0
- sqloader-0.1.0/pyproject.toml +28 -0
- sqloader-0.1.0/setup.cfg +4 -0
- sqloader-0.1.0/setup.py +28 -0
- sqloader-0.1.0/sqloader/__init__.py +6 -0
- sqloader-0.1.0/sqloader/_prototype.py +101 -0
- sqloader-0.1.0/sqloader/init.py +69 -0
- sqloader-0.1.0/sqloader/migrator.py +95 -0
- sqloader-0.1.0/sqloader/mysql.py +288 -0
- sqloader-0.1.0/sqloader/sqlite3.py +218 -0
- sqloader-0.1.0/sqloader/sqloader.py +51 -0
- sqloader-0.1.0/sqloader.egg-info/PKG-INFO +92 -0
- sqloader-0.1.0/sqloader.egg-info/SOURCES.txt +16 -0
- sqloader-0.1.0/sqloader.egg-info/dependency_links.txt +1 -0
- sqloader-0.1.0/sqloader.egg-info/requires.txt +2 -0
- sqloader-0.1.0/sqloader.egg-info/top_level.txt +1 -0
sqloader-0.1.0/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 horrible-gh
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
sqloader-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: sqloader
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A simple and extensible SQL migration and loader utility for Python.
|
|
5
|
+
Home-page: https://github.com/horrible-gh/sqloader
|
|
6
|
+
Author: horrible-gh
|
|
7
|
+
Author-email: horrible <shinjpn1@gmail.com>
|
|
8
|
+
Project-URL: Homepage, https://github.com/horrible-gh/sqloader
|
|
9
|
+
Project-URL: Bug Tracker, https://github.com/horrible-gh/sqloader/issues
|
|
10
|
+
Keywords: sqloader,sql loader,migration,database migration,MySQL,SQLite,SQL migration,schema management,json sql loader
|
|
11
|
+
Classifier: Programming Language :: Python :: 3
|
|
12
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
13
|
+
Classifier: Operating System :: OS Independent
|
|
14
|
+
Requires-Python: >=3.6
|
|
15
|
+
Description-Content-Type: text/markdown
|
|
16
|
+
License-File: LICENSE
|
|
17
|
+
Requires-Dist: LogAssist
|
|
18
|
+
Requires-Dist: pymysql
|
|
19
|
+
Dynamic: author
|
|
20
|
+
Dynamic: home-page
|
|
21
|
+
Dynamic: license-file
|
|
22
|
+
Dynamic: requires-python
|
|
23
|
+
|
|
24
|
+
# sqloader
|
|
25
|
+
|
|
26
|
+
A lightweight Python utility for managing SQL migrations and loading SQL from JSON or .sql files.
|
|
27
|
+
Supports common relational databases and is designed for simple, clean integration with any Python backend (e.g., FastAPI).
|
|
28
|
+
|
|
29
|
+
---
|
|
30
|
+
|
|
31
|
+
## Installation
|
|
32
|
+
|
|
33
|
+
```powershell
|
|
34
|
+
pip install sqloader
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
## Features
|
|
38
|
+
|
|
39
|
+
- ✅ Easy database migration management
|
|
40
|
+
- ✅ Load SQL queries from `.json` or `.sql` files
|
|
41
|
+
- ✅ Supports MySQL and SQLite
|
|
42
|
+
- ✅ Clean API for integration
|
|
43
|
+
- ✅ Lightweight and dependency-minimized
|
|
44
|
+
|
|
45
|
+
## Quickstart
|
|
46
|
+
|
|
47
|
+
```python
|
|
48
|
+
from sqloader.init import database_init
|
|
49
|
+
|
|
50
|
+
config = {
|
|
51
|
+
"type": "mysql",
|
|
52
|
+
"mysql": {
|
|
53
|
+
"host": "localhost",
|
|
54
|
+
"port": 3306,
|
|
55
|
+
"user": "root",
|
|
56
|
+
"password": "pass",
|
|
57
|
+
"database": "mydb"
|
|
58
|
+
},
|
|
59
|
+
"service": {
|
|
60
|
+
"sqloder": "res/sql/sqloader/mysql"
|
|
61
|
+
},
|
|
62
|
+
"migration": {
|
|
63
|
+
"auto_migration": True,
|
|
64
|
+
"migration_path": "res/sql/migration/mysql"
|
|
65
|
+
},
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
db, sqloader, migrator = database_init(config)
|
|
69
|
+
|
|
70
|
+
# Example usage
|
|
71
|
+
query = sqloader.load_sql("user_info", "user.get_user_by_id")
|
|
72
|
+
result = db.select_one(query, (123,))
|
|
73
|
+
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
## SQL Loading Behavior
|
|
77
|
+
|
|
78
|
+
- If the value in the .json file ends with .sql, the referenced file will be loaded from the same directory.
|
|
79
|
+
- Otherwise, the value is treated as a raw SQL string.
|
|
80
|
+
|
|
81
|
+
Example JSON file user.json:
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
```json
|
|
85
|
+
{
|
|
86
|
+
"user": {
|
|
87
|
+
"get_user_by_id": "SELECT * FROM users WHERE id = %s",
|
|
88
|
+
"get_all_users": "user_all.sql"
|
|
89
|
+
},
|
|
90
|
+
"get_etc": "SELECT * FROM etc"
|
|
91
|
+
}
|
|
92
|
+
```
|
sqloader-0.1.0/README.md
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
# sqloader
|
|
2
|
+
|
|
3
|
+
A lightweight Python utility for managing SQL migrations and loading SQL from JSON or .sql files.
|
|
4
|
+
Supports common relational databases and is designed for simple, clean integration with any Python backend (e.g., FastAPI).
|
|
5
|
+
|
|
6
|
+
---
|
|
7
|
+
|
|
8
|
+
## Installation
|
|
9
|
+
|
|
10
|
+
```powershell
|
|
11
|
+
pip install sqloader
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
## Features
|
|
15
|
+
|
|
16
|
+
- ✅ Easy database migration management
|
|
17
|
+
- ✅ Load SQL queries from `.json` or `.sql` files
|
|
18
|
+
- ✅ Supports MySQL and SQLite
|
|
19
|
+
- ✅ Clean API for integration
|
|
20
|
+
- ✅ Lightweight and dependency-minimized
|
|
21
|
+
|
|
22
|
+
## Quickstart
|
|
23
|
+
|
|
24
|
+
```python
|
|
25
|
+
from sqloader.init import database_init
|
|
26
|
+
|
|
27
|
+
config = {
|
|
28
|
+
"type": "mysql",
|
|
29
|
+
"mysql": {
|
|
30
|
+
"host": "localhost",
|
|
31
|
+
"port": 3306,
|
|
32
|
+
"user": "root",
|
|
33
|
+
"password": "pass",
|
|
34
|
+
"database": "mydb"
|
|
35
|
+
},
|
|
36
|
+
"service": {
|
|
37
|
+
"sqloder": "res/sql/sqloader/mysql"
|
|
38
|
+
},
|
|
39
|
+
"migration": {
|
|
40
|
+
"auto_migration": True,
|
|
41
|
+
"migration_path": "res/sql/migration/mysql"
|
|
42
|
+
},
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
db, sqloader, migrator = database_init(config)
|
|
46
|
+
|
|
47
|
+
# Example usage
|
|
48
|
+
query = sqloader.load_sql("user_info", "user.get_user_by_id")
|
|
49
|
+
result = db.select_one(query, (123,))
|
|
50
|
+
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
## SQL Loading Behavior
|
|
54
|
+
|
|
55
|
+
- If the value in the .json file ends with .sql, the referenced file will be loaded from the same directory.
|
|
56
|
+
- Otherwise, the value is treated as a raw SQL string.
|
|
57
|
+
|
|
58
|
+
Example JSON file user.json:
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
```json
|
|
62
|
+
{
|
|
63
|
+
"user": {
|
|
64
|
+
"get_user_by_id": "SELECT * FROM users WHERE id = %s",
|
|
65
|
+
"get_all_users": "user_all.sql"
|
|
66
|
+
},
|
|
67
|
+
"get_etc": "SELECT * FROM etc"
|
|
68
|
+
}
|
|
69
|
+
```
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=61.0"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "sqloader"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
authors = [
|
|
9
|
+
{ name="horrible", email="shinjpn1@gmail.com" },
|
|
10
|
+
]
|
|
11
|
+
description = "A simple and extensible SQL migration and loader utility for Python."
|
|
12
|
+
readme = "README.md"
|
|
13
|
+
requires-python = ">=3.7"
|
|
14
|
+
classifiers = [
|
|
15
|
+
"Programming Language :: Python :: 3",
|
|
16
|
+
"License :: OSI Approved :: MIT License",
|
|
17
|
+
"Operating System :: OS Independent",
|
|
18
|
+
]
|
|
19
|
+
keywords = [
|
|
20
|
+
"sqloader", "sql loader", "migration", "database migration",
|
|
21
|
+
"MySQL", "SQLite", "SQL migration", "schema management",
|
|
22
|
+
"json sql loader"
|
|
23
|
+
]
|
|
24
|
+
dependencies=['LogAssist', 'pymysql']
|
|
25
|
+
|
|
26
|
+
[project.urls]
|
|
27
|
+
"Homepage" = "https://github.com/horrible-gh/sqloader"
|
|
28
|
+
"Bug Tracker" = "https://github.com/horrible-gh/sqloader/issues"
|
sqloader-0.1.0/setup.cfg
ADDED
sqloader-0.1.0/setup.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
from setuptools import setup, find_packages

# Packaging metadata for sqloader.
# Fixed: description and url were copy-pasted from the LogAssist project;
# dependency pins now match the unpinned declarations in pyproject.toml.
setup(
    name='sqloader',
    version='0.1.0',
    description='A simple and extensible SQL migration and loader utility for Python.',
    author='horrible-gh',
    author_email='shinjpn1@gmail.com',
    url='https://github.com/horrible-gh/sqloader',
    packages=find_packages(),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: 3.11',
    ],
    python_requires='>=3.6',
    install_requires=[
        "LogAssist",
        "pymysql",
    ],
)
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
import os
|
|
2
|
+
|
|
3
|
+
SQLITE = 1
|
|
4
|
+
MYSQL = 2
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class DatabasePrototype:
    """Abstract interface shared by the MySQL and SQLite wrappers.

    Subclasses override the connection/query methods; this base class only
    provides SQL-file helpers (set_sql_path / load_sql), a simple string
    escaper, and no-op stubs for the rest of the contract.
    """

    db_type = ""
    # Base directory for external .sql files; set via set_sql_path() or by a
    # subclass constructor. Declared here so load_sql() never hits AttributeError.
    external_sql_path = None

    def connect(self):
        pass

    def reconnect(self):
        pass

    def execute(self, query, params=None, commit=True):
        pass

    def execute_query(self, query, params=None, commit=True):
        pass

    def commit(self):
        pass

    def fetch_one(self, query, params=None):
        pass

    def fetch_all(self, query, params=None):
        pass

    def close(self):
        pass

    def escape_string(self, value):
        """Escape a string for direct SQL embedding; non-strings pass through.

        Fixed: the original definition was missing `self`, so calling it as a
        method passed the instance itself as `value` and returned it unchanged.
        """
        if isinstance(value, str):
            replacements = {
                "'": "''",
                "--": "––",
                ";": ";",
                "\\": "\\\\",
                "%": "\\%",
                "_": "\\_",
            }
            for old, new in replacements.items():
                value = value.replace(old, new)
        return value

    def keep_alive(self, interval=600):
        # Fixed: the class previously defined keep_alive twice; the later
        # zero-argument stub silently shadowed this one.
        pass

    def rollback(self):
        pass

    def set_sql_path(self, sql_path):
        """Set the base directory used by load_sql()."""
        self.external_sql_path = sql_path

    def load_sql(self, sql_file, directory="."):
        """Read and return the contents of an external .sql file.

        :raises FileNotFoundError: when the resolved path does not exist
        :raises RuntimeError: when no sql path was configured
        """
        if self.external_sql_path:
            sql_path = f"{self.external_sql_path}/{directory}/{sql_file}"
            if os.path.exists(sql_path):
                with open(sql_path, 'r') as file:
                    return file.read()
            else:
                raise FileNotFoundError(f"File not found: {sql_path}")
        else:
            raise RuntimeError("External sql directory not initialized.")

    def begin_transaction(self):
        pass
|
|
75
|
+
class Transaction:
    """Abstract transaction context; concrete DB wrappers provide subclasses.

    Every method is a deliberate no-op here — the base class exists only to
    define the shape of the context-manager protocol.
    """

    def __init__(self, wrapper):
        """Bind the transaction to its owning database wrapper (no-op)."""

    def execute(self, query, params=None):
        """Run one statement inside the transaction (no-op in the base class)."""

    def fetchall(self):
        """Return all rows of the last statement (no-op in the base class)."""

    def fetchone(self):
        """Return one row of the last statement (no-op in the base class)."""

    def commit(self):
        """Persist the transaction (no-op in the base class)."""

    def rollback(self):
        """Undo the transaction (no-op in the base class)."""

    def close(self):
        """Release transaction resources (no-op in the base class)."""

    def __enter__(self):
        """Enter the context; the base class intentionally yields None."""

    def __exit__(self, exc_type, exc_val, traceback):
        """Leave the context; the base class takes no action."""
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import LogAssist.log as Logger
|
|
2
|
+
from . import MySqlWrapper, SQLiteWrapper, DatabaseMigrator, SQLoader
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def check_and_get(config, target):
    """Return config[target], raising when the key is absent or maps to None.

    :param config: dict-like configuration object
    :param target: required key
    :raises Exception: when the key is missing or its value is None
    """
    val = config.get(target)
    if val is None:
        raise Exception(f"Require value {target}")
    return val
|
|
12
|
+
|
|
13
|
+
def database_init(db_config):
    """Build the database wrapper, optional SQLoader and optional migrator.

    :param db_config: dict with "type" plus a per-type connection section,
                      and optional "service" / "migration" sections
    :returns: (db_instance, sqloader_or_None, migrator_or_None)
    :raises ValueError: when "type" names an unsupported database
    :raises SystemExit: when migration fails (mirrors the original exit(1))
    """
    db_type = check_and_get(db_config, "type")
    dbconn_info = db_config[db_type]

    if db_type == "mysql":
        host = check_and_get(dbconn_info, "host")
        user = check_and_get(dbconn_info, "user")
        password = check_and_get(dbconn_info, "password")
        database = check_and_get(dbconn_info, "database")
        port = dbconn_info.get("port")
        log = dbconn_info.get("log", False)

        # Only forward the port when explicitly configured so the wrapper's
        # own default applies otherwise.
        if port is not None:
            db_instance = MySqlWrapper(host=host, user=user, password=password,
                                       db=database, port=port, log=log)
        else:
            db_instance = MySqlWrapper(host=host, user=user, password=password,
                                       db=database, log=log)
        Logger.debug("MySQL initialized")
    elif db_type in ("sqlite3", "sqlite", "local"):
        db_name = check_and_get(dbconn_info, "db_name")
        db_instance = SQLiteWrapper(db_name=db_name)
        Logger.debug("SQLite3 initialized")
    else:
        # Fixed: previously an unknown type silently returned a None instance.
        raise ValueError(f"Unsupported database type: {db_type}")

    db_service = db_config.get("service")
    sqloader = None
    if db_service is not None:
        # NOTE: 'sqloder' (sic) is the established config key; kept for
        # backward compatibility with existing configurations.
        sqloader_path = db_service.get('sqloder')
        if sqloader_path is not None:
            sqloader = SQLoader(sqloader_path)
            Logger.debug("SQLoader initialized")

    migration_config = db_config.get('migration')
    migrator = None
    Logger.debug(migration_config)

    if migration_config is not None:
        try:
            migration_path = check_and_get(migration_config, 'migration_path')
            auto_migration = migration_config.get("auto_migration", False)

            Logger.debug("Starting Database Migrator")
            migrator = DatabaseMigrator(
                db_instance, migration_path, auto_migration)
            Logger.debug("Database Migration Successfully")
        except Exception as e:
            Logger.error(f"Database Migration Failed.{e}")
            # Equivalent to the original exit(1) without relying on the
            # site-installed exit() helper.
            raise SystemExit(1)

    return db_instance, sqloader, migrator
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import glob
|
|
3
|
+
from ._prototype import DatabasePrototype, MYSQL, SQLITE
|
|
4
|
+
|
|
5
|
+
class DatabaseMigrator:
    """Applies pending .sql migration files in sorted order.

    Applied file names are recorded in a `migrations` table so each file
    runs exactly once.
    """

    def __init__(self, db: DatabasePrototype, migrations_path, auto_run=False):
        """
        :param db: MySQL or SQLite wrapper object
        :param migrations_path: folder holding the .sql files (absolute or relative)
        :param auto_run: when True, apply pending migrations during init
        """
        self.db = db
        # Store as an absolute path so later joins are unambiguous.
        self.migrations_path = os.path.abspath(migrations_path)
        self.create_migrations_table()
        if auto_run:
            self.apply_migrations()

    def create_migrations_table(self):
        """Create the migration-bookkeeping table if it does not exist."""
        if self.db.db_type == MYSQL:
            self.db.execute("""
            CREATE TABLE IF NOT EXISTS migrations (
                filename VARCHAR(255) PRIMARY KEY,
                applied_at DATETIME DEFAULT CURRENT_TIMESTAMP
            );
            """, None, commit=True)
        elif self.db.db_type == SQLITE:
            self.db.execute("""
            CREATE TABLE IF NOT EXISTS migrations (
                filename TEXT PRIMARY KEY,
                applied_at DATETIME DEFAULT CURRENT_TIMESTAMP
            );
            """, None, commit=True)
        self.db.commit()

    def apply_migrations(self):
        """Run every .sql file that is not yet recorded as applied."""
        applied_migrations = self.get_applied_migrations()
        for migration in self.get_migration_files():
            # `migration` is a path relative to migrations_path,
            # e.g. "001_init.sql", "002_update.sql"
            if migration not in applied_migrations:
                self.apply_migration(migration)

    def apply_migration(self, migration):
        """Execute one migration file inside a transaction and record it."""
        full_path = os.path.join(self.migrations_path, migration)

        # NOTE(review): naive split on ';' — statements containing literal
        # semicolons (string constants, triggers) would be cut apart.
        with open(full_path, 'r', encoding='utf-8') as f:
            sql_commands = f.read().split(';')

        try:
            # All statements of one file share a single transaction; the
            # context manager commits on success and rolls back on error.
            with self.db.begin_transaction() as txn:
                for command in sql_commands:
                    command = command.strip()
                    if command:
                        txn.execute(command)

            placeholder = '?' if self.db.db_type == SQLITE else '%s'
            self.db.execute(
                f"INSERT INTO migrations (filename) VALUES ({placeholder})",
                (migration,),
                commit=True
            )

            print(f"Migration {migration} applied successfully.")
        except Exception as e:
            raise Exception(f"Failed to apply migration {migration}: {e}") from e

    def get_migration_files(self):
        """Return the sorted *.sql file names, relative to migrations_path."""
        all_files = sorted(glob.glob(os.path.join(self.migrations_path, "*.sql")))
        return [os.path.relpath(f, self.migrations_path) for f in all_files]

    def get_applied_migrations(self):
        """Return the set of already-applied file names from the DB.

        Fixed: handles both mapping-style rows (MySQL DictCursor, sqlite3.Row)
        and plain tuples (default sqlite3 row factory); the original assumed
        row['filename'] and crashed on tuple rows.
        """
        rows = self.db.fetch_all("SELECT filename FROM migrations")
        applied = set()
        for row in rows:
            try:
                applied.add(row['filename'])
            except (TypeError, KeyError, IndexError):
                applied.add(row[0])
        return applied
|
|
@@ -0,0 +1,288 @@
|
|
|
1
|
+
import pymysql
|
|
2
|
+
import os
|
|
3
|
+
import threading
|
|
4
|
+
from ._prototype import DatabasePrototype, Transaction, MYSQL
|
|
5
|
+
from pymysql.cursors import DictCursor
|
|
6
|
+
from time import sleep
|
|
7
|
+
|
|
8
|
+
query_semaphore = None
|
|
9
|
+
|
|
10
|
+
class MySqlWrapper(DatabasePrototype):
    """MySQL implementation of DatabasePrototype.

    Each query opens a fresh pymysql connection (rows come back as dicts via
    DictCursor); a module-level semaphore bounds how many queries run in
    parallel. The persistent self.conn/self.cursor pair is retained only for
    backward compatibility and for the optional keep-alive thread.
    """

    db_type = MYSQL
    log_print = False
    external_sql_path = None

    def __init__(self, host, user, password, db, port=3306, log=False,
                 keep_alive_interval=-1, sql_path=None, max_parallel_queries=5):
        self.host = host
        self.user = user
        self.password = password
        self.db = db
        self.port = port
        self.log_print = log
        self.external_sql_path = sql_path

        global query_semaphore
        query_semaphore = threading.Semaphore(max_parallel_queries)

        # Legacy persistent connection, kept for compatibility; the per-query
        # methods below open their own connections.
        self.connect()

        if keep_alive_interval > 0:
            self.keep_alive_interval = keep_alive_interval
            self.keep_alive_thread = threading.Thread(
                target=self.keep_alive, daemon=True)
            self.keep_alive_thread.start()

    def _new_connection(self):
        """Open and return a fresh pymysql connection with dict rows."""
        return pymysql.connect(
            host=self.host,
            user=self.user,
            password=self.password,
            db=self.db,
            port=self.port,
            cursorclass=DictCursor
        )

    def connect(self):
        """Kept for backward compatibility; execute_query() does not use it."""
        self.conn = self._new_connection()
        self.cursor = self.conn.cursor()

    def reconnect(self):
        """Ping and reconnect the legacy connection if it dropped."""
        try:
            self.conn.ping(reconnect=True)
        except Exception:
            # Fixed: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            self.connect()

    def log(self, msg):
        # Opt-in query logging, enabled via the `log` constructor flag.
        if self.log_print:
            print(msg)

    def normalize_params(self, params):
        """Convert dict params to a value list; pass others through unchanged."""
        if params is None:
            return None
        if isinstance(params, dict):
            return list(params.values())
        return params

    def execute_query(self, query, params=None, commit=True, retry=1):
        """Run `query` on a fresh connection; return cursor.execute()'s count.

        Retries once (by default) on MySQL error 2006 ("server has gone away").
        """
        query_semaphore.acquire()
        conn = None  # Fixed: guard so a failed connect can't leave it unbound
        try:
            conn = self._new_connection()
            try:
                with conn.cursor(DictCursor) as cursor:
                    self.log(query)
                    if params is None:
                        result = cursor.execute(query)
                    else:
                        self.log(params)
                        result = cursor.execute(
                            query, self.normalize_params(params))

                if commit:
                    conn.commit()
                return result

            except pymysql.MySQLError as e:
                print(f"Error executing query: {e}")
                print(f"Last query: {query}")
                try:
                    conn.rollback()
                except Exception as ex:
                    print(f"Rollback failed: {ex}")
                if e.args[0] == 2006 and retry > 0:
                    print("MySQL server has gone away. Reconnecting and retrying query...")
                    conn.close()
                    return self.execute_query(query, params, commit, retry=retry - 1)
                else:
                    raise e
        finally:
            try:
                if conn is not None:
                    conn.close()
            except Exception as ex:
                print(f"Closing connection failed: {ex}")
            query_semaphore.release()

    def execute(self, query, params=None, commit=True):
        """Alias for execute_query(), kept for interface compatibility."""
        return self.execute_query(query, params, commit)

    def _fetch(self, query, params, one):
        """Shared body of fetch_one/fetch_all: fresh connection per call."""
        query_semaphore.acquire()
        conn = None
        try:
            conn = self._new_connection()
            with conn.cursor(DictCursor) as cursor:
                params = self.normalize_params(params)
                self.log(query)
                if params is None:
                    cursor.execute(query)
                else:
                    self.log(params)
                    cursor.execute(query, params)
                return cursor.fetchone() if one else cursor.fetchall()
        except pymysql.MySQLError as e:
            print(f"Error fetching data: {e}")
            print(f"Last query: {query}")
            raise e
        finally:
            try:
                if conn is not None:
                    conn.close()
            except Exception:
                pass
            query_semaphore.release()

    def fetch_all(self, query, params=None):
        """Return all rows of a SELECT as a list of dicts."""
        return self._fetch(query, params, one=False)

    def fetch_one(self, query, params=None):
        """Return the first row of a SELECT as a dict, or None."""
        return self._fetch(query, params, one=True)

    def commit(self):
        # Per-query connections commit themselves; nothing to do here.
        pass

    def rollback(self):
        pass

    def close(self):
        """Close the legacy persistent connection, if any (compatibility)."""
        if hasattr(self, 'cursor') and self.cursor:
            self.cursor.close()
        if hasattr(self, 'conn') and self.conn:
            self.conn.close()

    def load_sql(self, sql_file, directory="."):
        """Read and return the contents of an external .sql file."""
        if self.external_sql_path:
            sql_path = f"{self.external_sql_path}/{directory}/{sql_file}"
            if os.path.exists(sql_path):
                with open(sql_path, 'r') as file:
                    return file.read()
            else:
                raise FileNotFoundError(f"File not found: {sql_path}")
        else:
            raise RuntimeError("External sql directory not initialized.")

    def keep_alive(self):
        """Periodic ping loop for the legacy persistent connection.

        With per-query connections this has limited effect; kept for backward
        compatibility with the keep_alive_interval option.
        """
        while True:
            sleep(self.keep_alive_interval)
            try:
                with self.conn.cursor() as cursor:
                    cursor.execute("SELECT 1")
            except pymysql.MySQLError as e:
                print(f"Keep-alive query failed: {e}")
                if e.args[0] == 2006:
                    print("Reconnecting to the database for keep-alive...")
                    self.reconnect()

    def begin_transaction(self):
        """Return a transaction context bound to one dedicated connection,
        so several statements can be grouped into a single transaction."""
        return MySqlTransaction(self)
|
|
246
|
+
|
|
247
|
+
class MySqlTransaction(Transaction):
    """Context manager that runs several statements on one dedicated connection.

    Commits when the `with` block exits cleanly, rolls back when an exception
    escapes it, and closes the connection either way.
    """

    def __init__(self, wrapper: MySqlWrapper):
        self.wrapper = wrapper
        # One private connection for the whole transaction lifetime.
        self.conn = pymysql.connect(
            host=wrapper.host,
            user=wrapper.user,
            password=wrapper.password,
            db=wrapper.db,
            port=wrapper.port,
            cursorclass=DictCursor
        )
        self.cursor = self.conn.cursor()

    def execute(self, query, params=None):
        """Run one statement on the transaction's cursor."""
        return self.cursor.execute(query, params)

    def fetchall(self):
        """All rows of the most recent statement."""
        return self.cursor.fetchall()

    def fetchone(self):
        """Next row of the most recent statement."""
        return self.cursor.fetchone()

    def commit(self):
        self.conn.commit()

    def rollback(self):
        self.conn.rollback()

    def close(self):
        self.cursor.close()
        self.conn.close()

    def __enter__(self):
        # Hand the transaction object itself to the `with` block.
        return self

    def __exit__(self, exc_type, exc_val, traceback):
        # Commit on a clean exit, roll back when an exception escaped.
        if exc_type is None:
            self.commit()
        else:
            self.rollback()
        self.close()
|
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
import sqlite3
|
|
2
|
+
import threading
|
|
3
|
+
from ._prototype import DatabasePrototype, Transaction, SQLITE
|
|
4
|
+
|
|
5
|
+
query_semaphore = None
|
|
6
|
+
|
|
7
|
+
db_lock = threading.Lock()
|
|
8
|
+
|
|
9
|
+
class SQLiteWrapper(DatabasePrototype):
|
|
10
|
+
db_type = SQLITE
|
|
11
|
+
|
|
12
|
+
def __init__(self, db_name, memory_mode=False, max_parallel_queries=5):
|
|
13
|
+
self.db_name = db_name
|
|
14
|
+
self.memory_mode = memory_mode
|
|
15
|
+
|
|
16
|
+
global query_semaphore
|
|
17
|
+
query_semaphore = threading.Semaphore(max_parallel_queries)
|
|
18
|
+
|
|
19
|
+
if self.memory_mode:
|
|
20
|
+
# 인메모리: 단일 커넥션 + Lock
|
|
21
|
+
self.conn = sqlite3.connect(":memory:", check_same_thread=False)
|
|
22
|
+
self.cursor = self.conn.cursor()
|
|
23
|
+
# 만약 파일에서 데이터를 복사해야 한다면, 필요에 따라 아래 로직 사용
|
|
24
|
+
# backup_conn = sqlite3.connect(db_name)
|
|
25
|
+
# backup_conn.backup(self.conn)
|
|
26
|
+
# backup_conn.close()
|
|
27
|
+
else:
|
|
28
|
+
# 파일 모드: 나중에 세마포어 방식으로 새 커넥션을 여므로,
|
|
29
|
+
# 여기서 굳이 conn/cursor를 만들지 않아도 됩니다.
|
|
30
|
+
self.conn = None
|
|
31
|
+
self.cursor = None
|
|
32
|
+
|
|
33
|
+
def _execute_memory(self, query, params=None, commit=True):
|
|
34
|
+
"""인메모리 모드 - 단일 커넥션 + Lock(직렬화)."""
|
|
35
|
+
with db_lock:
|
|
36
|
+
try:
|
|
37
|
+
if params is None:
|
|
38
|
+
self.cursor.execute(query)
|
|
39
|
+
else:
|
|
40
|
+
self.cursor.execute(query, params)
|
|
41
|
+
if commit:
|
|
42
|
+
self.conn.commit()
|
|
43
|
+
return self.cursor.lastrowid
|
|
44
|
+
except sqlite3.DatabaseError as e:
|
|
45
|
+
print(f"Error executing query: {e}")
|
|
46
|
+
self.conn.rollback()
|
|
47
|
+
raise e
|
|
48
|
+
|
|
49
|
+
def _execute_file(self, query, params=None, commit=True):
|
|
50
|
+
"""파일 모드 - 세마포어 + 새 커넥션(병렬 제한)."""
|
|
51
|
+
query_semaphore.acquire()
|
|
52
|
+
try:
|
|
53
|
+
conn = sqlite3.connect(self.db_name, check_same_thread=False)
|
|
54
|
+
cursor = conn.cursor()
|
|
55
|
+
try:
|
|
56
|
+
if params is None:
|
|
57
|
+
cursor.execute(query)
|
|
58
|
+
else:
|
|
59
|
+
cursor.execute(query, params)
|
|
60
|
+
if commit:
|
|
61
|
+
conn.commit()
|
|
62
|
+
return cursor.lastrowid
|
|
63
|
+
except sqlite3.DatabaseError as e:
|
|
64
|
+
print(f"Error executing query (file mode): {e}")
|
|
65
|
+
conn.rollback()
|
|
66
|
+
raise e
|
|
67
|
+
finally:
|
|
68
|
+
cursor.close()
|
|
69
|
+
conn.close()
|
|
70
|
+
finally:
|
|
71
|
+
query_semaphore.release()
|
|
72
|
+
|
|
73
|
+
def execute(self, query, params=None, commit=True):
|
|
74
|
+
if self.memory_mode:
|
|
75
|
+
return self._execute_memory(query, params, commit)
|
|
76
|
+
else:
|
|
77
|
+
return self._execute_file(query, params, commit)
|
|
78
|
+
|
|
79
|
+
def fetch_one(self, query, params=None):
|
|
80
|
+
if self.memory_mode:
|
|
81
|
+
with db_lock:
|
|
82
|
+
try:
|
|
83
|
+
if params is None:
|
|
84
|
+
self.cursor.execute(query)
|
|
85
|
+
else:
|
|
86
|
+
self.cursor.execute(query, params)
|
|
87
|
+
return self.cursor.fetchone()
|
|
88
|
+
except sqlite3.DatabaseError as e:
|
|
89
|
+
print(f"Error fetching data (memory mode, fetch_one): {e}")
|
|
90
|
+
raise e
|
|
91
|
+
else:
|
|
92
|
+
# 파일 모드: 별도 새 커넥션으로 fetch
|
|
93
|
+
query_semaphore.acquire()
|
|
94
|
+
try:
|
|
95
|
+
conn = sqlite3.connect(self.db_name, check_same_thread=False)
|
|
96
|
+
cursor = conn.cursor()
|
|
97
|
+
try:
|
|
98
|
+
if params is None:
|
|
99
|
+
cursor.execute(query)
|
|
100
|
+
else:
|
|
101
|
+
cursor.execute(query, params)
|
|
102
|
+
return cursor.fetchone()
|
|
103
|
+
except sqlite3.DatabaseError as e:
|
|
104
|
+
print(f"Error fetching data (file mode, fetch_one): {e}")
|
|
105
|
+
raise e
|
|
106
|
+
finally:
|
|
107
|
+
cursor.close()
|
|
108
|
+
conn.close()
|
|
109
|
+
finally:
|
|
110
|
+
query_semaphore.release()
|
|
111
|
+
|
|
112
|
+
def fetch_all(self, query, params=None):
    """Run *query* and return every result row as a list.

    Memory mode reuses the shared connection's cursor under ``db_lock``;
    file mode opens a short-lived connection guarded by ``query_semaphore``.
    """
    # Build the positional arguments once; omit params when not supplied.
    call_args = (query,) if params is None else (query, params)

    if self.memory_mode:
        with db_lock:
            try:
                self.cursor.execute(*call_args)
                return self.cursor.fetchall()
            except sqlite3.DatabaseError as e:
                print(f"Error fetching data (memory mode, fetch_all): {e}")
                raise e

    # File mode: fetch through a dedicated throwaway connection.
    query_semaphore.acquire()
    try:
        conn = sqlite3.connect(self.db_name, check_same_thread=False)
        cursor = conn.cursor()
        try:
            cursor.execute(*call_args)
            return cursor.fetchall()
        except sqlite3.DatabaseError as e:
            print(f"Error fetching data (file mode, fetch_all): {e}")
            raise e
        finally:
            cursor.close()
            conn.close()
    finally:
        query_semaphore.release()
|
|
143
|
+
|
|
144
|
+
def rollback(self):
    """Roll back the shared in-memory connection; no-op in file mode."""
    if not self.memory_mode:
        # File mode opens a fresh connection per query, so there is no
        # persistent connection to roll back.
        return
    with db_lock:
        self.conn.rollback()
|
|
152
|
+
|
|
153
|
+
def commit(self):
    """Commit the shared in-memory connection; no-op in file mode."""
    if not self.memory_mode:
        # File mode connections are created and committed per query,
        # so there is nothing to commit here.
        return
    with db_lock:
        self.conn.commit()
|
|
160
|
+
|
|
161
|
+
def close(self):
    """Close the shared in-memory cursor/connection; no-op in file mode."""
    if not self.memory_mode:
        # File mode holds no long-lived connection, so nothing to close.
        return
    with db_lock:
        self.cursor.close()
        self.conn.close()
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def begin_transaction(self):
    """Open a transaction context bound to one dedicated connection.

    Every statement issued through the returned ``SQLiteTransaction``
    shares a single connection, so multiple queries can be committed or
    rolled back as one unit (also usable as a context manager).
    """
    return SQLiteTransaction(self)
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
class SQLiteTransaction:
    """Single-connection transaction scope for ``SQLiteWrapper``.

    Opens its own connection to the wrapper's database file so that all
    statements executed through it belong to one transaction.  As a
    context manager it commits on clean exit, rolls back when an
    exception escapes the block, and always closes afterwards.
    """

    def __init__(self, wrapper: SQLiteWrapper):
        self.wrapper = wrapper
        self.conn = sqlite3.connect(wrapper.db_name, check_same_thread=False)
        self.cursor = self.conn.cursor()

    def execute(self, query, params=None):
        # Forward to the dedicated cursor; omit params when not supplied.
        call_args = (query,) if params is None else (query, params)
        return self.cursor.execute(*call_args)

    def fetchall(self):
        return self.cursor.fetchall()

    def fetchone(self):
        return self.cursor.fetchone()

    def commit(self):
        self.conn.commit()

    def rollback(self):
        self.conn.rollback()

    def close(self):
        self.cursor.close()
        self.conn.close()

    def __enter__(self):
        # Context-manager support: `with db.begin_transaction() as tx: ...`
        return self

    def __exit__(self, exc_type, exc_val, traceback):
        if exc_type:
            self.rollback()
        else:
            self.commit()
        self.close()
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class SQLoader:
    """Loads SQL statements indexed by JSON files in a base directory.

    Each ``<name>.json`` index maps (possibly nested, dot-addressable)
    query names either to a raw SQL string or to the name of a ``.sql``
    file in the same directory that contains the statement.
    """

    def __init__(self, dir) -> None:
        # Base directory holding the .json index files and .sql files.
        self.sql_dir = dir

    def check_file_exists(self, file_path):
        """Return True if *file_path* is an existing regular file."""
        return os.path.isfile(file_path)

    def read_json_file(self, file_path):
        """Parse and return the JSON document at *file_path*.

        Raises:
            FileNotFoundError: if the file does not exist.
        """
        if not self.check_file_exists(file_path):
            raise FileNotFoundError(f"File not found: {file_path}")
        with open(file_path, 'r') as file:
            return json.load(file)

    def read_sql_file(self, file_path, encode="utf-8"):
        """Return the full text of the SQL file at *file_path*.

        Raises:
            FileNotFoundError: if the file does not exist.
        """
        if not self.check_file_exists(file_path):
            raise FileNotFoundError(f"File not found: {file_path}")
        with open(file_path, 'r', encoding=encode) as file:
            return file.read()

    def deep_get(self, dictionary: dict, dotted_key: str):
        """Resolve a dot-separated key path inside nested dicts.

        Returns None when any intermediate value is missing or not a dict.
        """
        for key in dotted_key.split("."):
            if not isinstance(dictionary, dict):
                return None
            dictionary = dictionary.get(key)
        return dictionary

    def load_sql(self, filename: str, query_name: str, encode="utf-8"):
        """Load the SQL statement *query_name* from index *filename*.

        *filename* may be given with or without the ``.json`` extension.
        If the resolved value is a string ending in ``.sql`` it is treated
        as a file reference and that file's content is returned; otherwise
        the value itself is returned.

        Raises:
            ValueError: if *query_name* is not present in the JSON index.
            FileNotFoundError: if the index or a referenced .sql file is
                missing.
        """
        # BUG FIX 1: the path was built from a hard-coded placeholder
        # instead of the caller's *filename*, so no index could ever load.
        # BUG FIX 2: use endswith() rather than a substring test so names
        # that merely contain ".json" still get the extension appended.
        suffix = "" if filename.endswith(".json") else ".json"
        file_path = os.path.join(self.sql_dir, f"{filename}{suffix}")
        queries = self.read_json_file(file_path)

        query = self.deep_get(queries, query_name)
        if query is None:
            raise ValueError(f"Query not found: {query_name}")

        if isinstance(query, str) and query.endswith('.sql'):
            # Indirect entry: load the statement from the referenced file.
            query_file_path = os.path.join(self.sql_dir, query)
            return self.read_sql_file(query_file_path, encode)
        return query
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: sqloader
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A simple and extensible SQL migration and loader utility for Python.
|
|
5
|
+
Home-page: https://github.com/horrible-gh/sqloader
|
|
6
|
+
Author: horrible-gh
|
|
7
|
+
Author-email: horrible <shinjpn1@gmail.com>
|
|
8
|
+
Project-URL: Homepage, https://github.com/horrible-gh/sqloader
|
|
9
|
+
Project-URL: Bug Tracker, https://github.com/horrible-gh/sqloader/issues
|
|
10
|
+
Keywords: sqloader,sql loader,migration,database migration,MySQL,SQLite,SQL migration,schema management,json sql loader
|
|
11
|
+
Classifier: Programming Language :: Python :: 3
|
|
12
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
13
|
+
Classifier: Operating System :: OS Independent
|
|
14
|
+
Requires-Python: >=3.6
|
|
15
|
+
Description-Content-Type: text/markdown
|
|
16
|
+
License-File: LICENSE
|
|
17
|
+
Requires-Dist: LogAssist
|
|
18
|
+
Requires-Dist: pymysql
|
|
19
|
+
Dynamic: author
|
|
20
|
+
Dynamic: home-page
|
|
21
|
+
Dynamic: license-file
|
|
22
|
+
Dynamic: requires-python
|
|
23
|
+
|
|
24
|
+
# sqloader
|
|
25
|
+
|
|
26
|
+
A lightweight Python utility for managing SQL migrations and loading SQL from JSON or .sql files.
|
|
27
|
+
Supports common relational databases and is designed for simple, clean integration with any Python backend (e.g., FastAPI).
|
|
28
|
+
|
|
29
|
+
---
|
|
30
|
+
|
|
31
|
+
## Installation
|
|
32
|
+
|
|
33
|
+
```powershell
|
|
34
|
+
pip install sqloader
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
## Features
|
|
38
|
+
|
|
39
|
+
- ✅ Easy database migration management
|
|
40
|
+
- ✅ Load SQL queries from `.json` or `.sql` files
|
|
41
|
+
- ✅ Supports MySQL and SQLite
|
|
42
|
+
- ✅ Clean API for integration
|
|
43
|
+
- ✅ Lightweight and dependency-minimized
|
|
44
|
+
|
|
45
|
+
## Quickstart
|
|
46
|
+
|
|
47
|
+
```python
|
|
48
|
+
from sqloader.init import database_init
|
|
49
|
+
|
|
50
|
+
config = {
|
|
51
|
+
"type": "mysql",
|
|
52
|
+
"mysql": {
|
|
53
|
+
"host": "localhost",
|
|
54
|
+
"port": 3306,
|
|
55
|
+
"user": "root",
|
|
56
|
+
"password": "pass",
|
|
57
|
+
"database": "mydb"
|
|
58
|
+
},
|
|
59
|
+
"service": {
|
|
60
|
+
"sqloder": "res/sql/sqloader/mysql"
|
|
61
|
+
},
|
|
62
|
+
"migration": {
|
|
63
|
+
"auto_migration": True,
|
|
64
|
+
"migration_path": "res/sql/migration/mysql"
|
|
65
|
+
},
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
db, sqloader, migrator = database_init(config)
|
|
69
|
+
|
|
70
|
+
# Example usage
|
|
71
|
+
query = sqloader.load_sql("user_info", "user.get_user_by_id")
|
|
72
|
+
result = db.select_one(query, (123,))
|
|
73
|
+
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
## SQL Loading Behavior
|
|
77
|
+
|
|
78
|
+
- If the value in the .json file ends with .sql, the referenced file will be loaded from the same directory.
|
|
79
|
+
- Otherwise, the value is treated as a raw SQL string.
|
|
80
|
+
|
|
81
|
+
Example JSON file user.json:
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
```json
|
|
85
|
+
{
|
|
86
|
+
"user": {
|
|
87
|
+
"get_user_by_id": "SELECT * FROM users WHERE id = %s",
|
|
88
|
+
"get_all_users": "user_all.sql"
|
|
89
|
+
},
|
|
90
|
+
"get_etc": "SELECT * FROM etc"
|
|
91
|
+
}
|
|
92
|
+
```
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
LICENSE
|
|
2
|
+
README.md
|
|
3
|
+
pyproject.toml
|
|
4
|
+
setup.py
|
|
5
|
+
sqloader/__init__.py
|
|
6
|
+
sqloader/_prototype.py
|
|
7
|
+
sqloader/init.py
|
|
8
|
+
sqloader/migrator.py
|
|
9
|
+
sqloader/mysql.py
|
|
10
|
+
sqloader/sqlite3.py
|
|
11
|
+
sqloader/sqloader.py
|
|
12
|
+
sqloader.egg-info/PKG-INFO
|
|
13
|
+
sqloader.egg-info/SOURCES.txt
|
|
14
|
+
sqloader.egg-info/dependency_links.txt
|
|
15
|
+
sqloader.egg-info/requires.txt
|
|
16
|
+
sqloader.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
sqloader
|