MEDfl 0.1.31__py3-none-any.whl → 0.1.33__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {MEDfl-0.1.31.dist-info → MEDfl-0.1.33.dist-info}/METADATA +127 -128
- MEDfl-0.1.33.dist-info/RECORD +34 -0
- {MEDfl-0.1.31.dist-info → MEDfl-0.1.33.dist-info}/WHEEL +1 -1
- {MEDfl-0.1.31.dist-info → MEDfl-0.1.33.dist-info}/top_level.txt +0 -1
- Medfl/LearningManager/__init__.py +13 -13
- Medfl/LearningManager/client.py +150 -150
- Medfl/LearningManager/dynamicModal.py +287 -287
- Medfl/LearningManager/federated_dataset.py +60 -60
- Medfl/LearningManager/flpipeline.py +192 -192
- Medfl/LearningManager/model.py +223 -223
- Medfl/LearningManager/params.yaml +14 -14
- Medfl/LearningManager/params_optimiser.py +442 -442
- Medfl/LearningManager/plot.py +229 -229
- Medfl/LearningManager/server.py +181 -181
- Medfl/LearningManager/strategy.py +82 -82
- Medfl/LearningManager/utils.py +331 -308
- Medfl/NetManager/__init__.py +10 -9
- Medfl/NetManager/database_connector.py +43 -48
- Medfl/NetManager/dataset.py +92 -92
- Medfl/NetManager/flsetup.py +320 -320
- Medfl/NetManager/net_helper.py +254 -248
- Medfl/NetManager/net_manager_queries.py +142 -137
- Medfl/NetManager/network.py +194 -174
- Medfl/NetManager/node.py +184 -178
- Medfl/__init__.py +3 -2
- Medfl/scripts/__init__.py +2 -0
- Medfl/scripts/base.py +30 -0
- Medfl/scripts/create_db.py +126 -0
- alembic/env.py +61 -61
- scripts/base.py +29 -29
- scripts/config.ini +5 -5
- scripts/create_db.py +133 -133
- MEDfl/LearningManager/__init__.py +0 -13
- MEDfl/LearningManager/client.py +0 -150
- MEDfl/LearningManager/dynamicModal.py +0 -287
- MEDfl/LearningManager/federated_dataset.py +0 -60
- MEDfl/LearningManager/flpipeline.py +0 -192
- MEDfl/LearningManager/model.py +0 -223
- MEDfl/LearningManager/params.yaml +0 -14
- MEDfl/LearningManager/params_optimiser.py +0 -442
- MEDfl/LearningManager/plot.py +0 -229
- MEDfl/LearningManager/server.py +0 -181
- MEDfl/LearningManager/strategy.py +0 -82
- MEDfl/LearningManager/utils.py +0 -333
- MEDfl/NetManager/__init__.py +0 -9
- MEDfl/NetManager/database_connector.py +0 -48
- MEDfl/NetManager/dataset.py +0 -92
- MEDfl/NetManager/flsetup.py +0 -320
- MEDfl/NetManager/net_helper.py +0 -248
- MEDfl/NetManager/net_manager_queries.py +0 -137
- MEDfl/NetManager/network.py +0 -174
- MEDfl/NetManager/node.py +0 -178
- MEDfl/__init__.py +0 -2
- MEDfl-0.1.31.data/scripts/setup_mysql.sh +0 -22
- MEDfl-0.1.31.dist-info/RECORD +0 -54
- scripts/db_config.ini +0 -6
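Taken together: the importable package directory is renamed from MEDfl/ to Medfl/ (every MEDfl/... module is removed and re-added under Medfl/...), a new Medfl/scripts subpackage (base.py, create_db.py) is added, and the setup_mysql.sh helper script and scripts/db_config.ini are dropped from the wheel.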
Medfl/NetManager/node.py
CHANGED
@@ -1,178 +1,184 @@
[the removed lines are truncated by the diff viewer; only fragments survive (the same import block, two isinstance/TypeError checks, stray quotes), so only the new file is reproduced below]
+import pandas as pd
+
+from .net_helper import *
+from .net_manager_queries import *
+from MEDfl.LearningManager.utils import params
+from MEDfl.NetManager.database_connector import DatabaseManager
+
+from sqlalchemy import text, exc
+
+
+class Node:
+    """
+    A class representing a node in the network.
+
+    Attributes:
+        name (str): The name of the node.
+        train (int): An integer flag representing whether the node is used for training (1) or testing (0).
+        test_fraction (float, optional): The fraction of data used for testing when train=1. Default is 0.2.
+    """
+
+    def __init__(
+        self, name: str, train: int, test_fraction: float = 0.2, engine=None
+    ):
+        """
+        Initialize a Node instance.
+
+        Parameters:
+            name (str): The name of the node.
+            train (int): An integer flag representing whether the node is used for training (1) or testing (0).
+            test_fraction (float, optional): The fraction of data used for testing when train=1. Default is 0.2.
+        """
+        self.name = name
+        self.train = train
+        self.test_fraction = 1.0 if self.train == 0 else test_fraction
+
+        db_manager = DatabaseManager()
+        db_manager.connect()
+        self.engine = db_manager.get_connection()
+
+    def validate(self):
+        """Validate name, train, test_fraction"""
+        if not isinstance(self.name, str):
+            raise TypeError("name argument must be a string")
+
+        if not isinstance(self.train, int):
+            raise TypeError("train argument must be an int")
+
+        if not isinstance(self.test_fraction, float):
+            raise TypeError("test_fraction argument must be a float")
+
+    def create_node(self, NetId: int):
+        """Create a node in the database.
+        Parameters:
+            NetId (int): The ID of the network to which the node belongs.
+
+        Returns:
+            None
+        """
+        self.engine.execute(
+            text(INSERT_NODE_QUERY.format(self.name, NetId, self.train))
+        )
+
+    def delete_node(self):
+        """Delete the node from the database."""
+        self.engine.execute(text(DELETE_NODE_QUERY.format(self.name)))
+
+    def check_dataset_compatibility(self, data_df):
+        """Check if the dataset is compatible with the master dataset.
+        Parameters:
+            data_df (DataFrame): The dataset to check.
+
+        Returns:
+            None
+        """
+        if master_table_exists() != 1:
+            print("MasterDataset doesn't exist")
+        else:
+            columns = data_df.columns.tolist()
+
+            # get master_dataset columns
+            result_proxy = self.engine.execute(SELECT_MASTER_COLUMNS_QUERY)
+            master_table_columns = result_proxy.keys()
+
+            assert [x == y for x, y in zip(master_table_columns, columns)]
+
+    def update_node(self):
+        """Update the node information (not implemented)."""
+        pass
+
+    def get_dataset(self, column_name: str = None):
+        """Get the dataset for the node based on the given column name.
+        Parameters:
+            column_name (str, optional): The column name to filter the dataset. Default is None.
+
+        Returns:
+            DataFrame: The dataset associated with the node.
+        """
+        NodeId = get_nodeid_from_name(self.name)
+        if column_name is not None:
+            query = text(SELECT_DATASET_BY_COLUMN_QUERY.format(column_name, self.name))
+        else:
+            query = text(SELECT_DATASET_BY_NODE_ID_QUERY.format(NodeId))
+
+        result_proxy = self.engine.execute(query)
+        node_dataset = pd.DataFrame(result_proxy.fetchall(), columns=result_proxy.keys())
+
+        return node_dataset
+
+    def upload_dataset(self, dataset_name: str, path_to_csv: str = params['path_to_test_csv']):
+        """Upload the dataset to the database for the node.
+
+        Parameters:
+            dataset_name (str): The name of the dataset.
+            path_to_csv (str, optional): Path to the CSV file containing the dataset. Default is the path in params.
+
+        Returns:
+            None
+        """
+        try:
+            data_df = pd.read_csv(path_to_csv)
+            nodeId = get_nodeid_from_name(self.name)
+            columns = data_df.columns.tolist()
+            self.check_dataset_compatibility(data_df)
+
+            data_df = process_eicu(data_df)
+
+            # Insert data in batches
+            batch_size = 1000  # Adjust as needed
+            for start_idx in range(0, len(data_df), batch_size):
+                batch_data = data_df.iloc[start_idx:start_idx + batch_size]
+                insert_query = f"INSERT INTO Datasets (DataSetName, NodeId, {', '.join(columns)}) VALUES (:dataset_name, :nodeId, {', '.join([':' + col for col in columns])})"
+                data_to_insert = batch_data.to_dict(orient='records')
+                params = [{"dataset_name": dataset_name, "nodeId": nodeId, **row} for row in data_to_insert]
+                self.engine.execute(text(insert_query), params)
+        except exc.SQLAlchemyError as e:
+            print(f"Error uploading dataset: {e}")
+
+    def assign_dataset(self, dataset_name: str):
+        """Assign an existing dataset to the node.
+        Parameters:
+            dataset_name (str): The name of the dataset to assign.
+
+        Returns:
+            None
+        """
+
+        nodeId = get_nodeid_from_name(self.name)
+        query = f"UPDATE DataSets SET nodeId = {nodeId} WHERE DataSetName = '{dataset_name}'"
+        self.engine.execute(text(query))
+
+    def unassign_dataset(self, dataset_name: str):
+        """Unassign an existing dataset from the node.
+        Parameters:
+            dataset_name (str): The name of the dataset to unassign.
+
+        Returns:
+            None
+        """
+
+        query = f"UPDATE DataSets SET nodeId = {-1} WHERE DataSetName = '{dataset_name}'"
+        self.engine.execute(text(query))
+
+    def list_alldatasets(self):
+        """List all datasets associated with the node.
+        Returns:
+            DataFrame: A DataFrame containing information about all datasets associated with the node.
+        """
+        return pd.read_sql(
+            text(SELECT_ALL_DATASETS_QUERY.format(self.name)), my_eng
+        )
+
+    @staticmethod
+    def list_allnodes():
+        """List all nodes in the database.
+        Returns:
+            DataFrame: A DataFrame containing information about all nodes in the database.
+        """
+        query = text(SELECT_ALL_NODES_QUERY)
+        res = pd.read_sql(query, my_eng)
+        return res
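For orientation, a minimal usage sketch of the rewritten Node class. The node name, NetId, and CSV path are hypothetical, and it assumes the MEDfl database and master dataset have already been set up:

from Medfl.NetManager.node import Node

node = Node(name="hospital_1", train=1, test_fraction=0.2)  # hypothetical node
node.validate()                    # type-checks name / train / test_fraction
node.create_node(NetId=1)          # insert the node row; a network with NetId 1 is assumed
node.upload_dataset("eicu_subset", "data/hospital_1.csv")   # batched INSERTs
print(node.list_alldatasets())     # DataFrame of datasets attached to this node
print(Node.list_allnodes())        # DataFrame of every node in the database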
Medfl/__init__.py
CHANGED
@@ -1,2 +1,3 @@
-from .LearningManager import *
-from .NetManager import *
+from .LearningManager import *
+from .NetManager import *
+from .scripts import *
Medfl/scripts/base.py
ADDED
@@ -0,0 +1,30 @@
+import mysql.connector
+from sqlalchemy import create_engine, text
+from configparser import ConfigParser
+import yaml
+import pkg_resources
+import os
+
+# Get the directory of the current script
+current_directory = os.path.dirname(os.path.abspath(__file__))
+
+# Load configuration from the config file
+config_file_path = os.path.join(current_directory, 'db_config.ini')
+
+config = ConfigParser()
+config.read(config_file_path)
+mysql_config = config['mysql']
+
+
+connection_string = (
+    f"mysql+mysqlconnector://{mysql_config['user']}:{mysql_config['password']}@"
+    f"{mysql_config['host']}:{mysql_config['port']}/{mysql_config['database']}"
+)
+
+eng = create_engine(
+    connection_string,
+    execution_options={"autocommit": True},
+)
+
+my_eng = eng.connect()
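base.py expects a db_config.ini next to the module with a [mysql] section supplying the five keys read above. A sketch with placeholder values; note that no db_config.ini appears under Medfl/scripts in the file list, so it presumably has to be created by the user:

[mysql]
host = localhost
port = 3306
user = medfl_user
password = change_me
database = MEDfl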
Medfl/scripts/create_db.py
ADDED
@@ -0,0 +1,126 @@
+import sys
+import sqlite3
+import pandas as pd
+from configparser import ConfigParser
+import os
+import ast
+
+from MEDfl.LearningManager.utils import *
+
+
+def main(csv_file_path):
+    try:
+        # Get the directory of the current script
+        current_directory = os.path.dirname(os.path.abspath(__file__))
+
+        # Load configuration from the config file
+        # config_file_path = os.path.join(current_directory, 'sqllite_config.ini')
+
+        config_file_path = load_db_config()
+
+        # config = ConfigParser()
+        # config.read(config_file_path)
+        # sqlite_config = config['sqllite']
+
+        sqlite_config = config_file_path
+
+        print('Im here !')
+
+        # Connect to SQLite database (it will be created if it doesn't exist)
+        database_path = sqlite_config['database']
+        conn = sqlite3.connect(database_path)
+        cursor = conn.cursor()
+
+        # Drop each table if it exists
+        tables = ['Networks', 'FLsetup', 'Nodes', 'DataSets', 'FLpipeline', 'testResults', 'FedDatasets']
+        for table in tables:
+            cursor.execute(f"DROP TABLE IF EXISTS {table}")
+
+        # Create Networks table
+        cursor.execute(
+            "CREATE TABLE Networks( \
+                NetId INTEGER PRIMARY KEY AUTOINCREMENT, \
+                NetName TEXT \
+            );"
+        )
+
+        # Create FLsetup table
+        cursor.execute("CREATE TABLE FLsetup (\
+            FLsetupId INTEGER PRIMARY KEY AUTOINCREMENT,\
+            name TEXT NOT NULL, \
+            description TEXT NOT NULL,\
+            creation_date TEXT NOT NULL,\
+            NetId INTEGER NOT NULL,\
+            column_name TEXT\
+        )")
+
+        # Create Nodes table
+        cursor.execute("CREATE TABLE Nodes ( \
+            NodeId INTEGER PRIMARY KEY AUTOINCREMENT,\
+            NodeName TEXT,\
+            train BOOLEAN DEFAULT 1,\
+            NetId INTEGER\
+        )")
+
+        data_df = pd.read_csv(csv_file_path)
+        columns = data_df.columns.tolist()
+        column_map = {"object": "TEXT", "int64": "INTEGER", "float64": "REAL"}
+        sub_query = ", ".join(f"{col} {column_map[str(data_df[col].dtype)]}" for col in columns)
+
+        # Create Datasets table by getting columns from the master csv file
+        cursor.execute(
+            f"CREATE TABLE DataSets( \
+                DataSetId INTEGER PRIMARY KEY AUTOINCREMENT, \
+                DataSetName TEXT, \
+                NodeId INTEGER,\
+                {sub_query}\
+            )"
+        )
+
+        # Create FLpipeline table
+        cursor.execute("CREATE TABLE FLpipeline(\
+            id INTEGER PRIMARY KEY AUTOINCREMENT,\
+            name TEXT NOT NULL, \
+            description TEXT NOT NULL,\
+            creation_date TEXT NOT NULL,\
+            results TEXT NOT NULL\
+        ) ")
+
+        # Create test results table
+        cursor.execute("CREATE TABLE testResults(\
+            pipelineId INTEGER,\
+            nodename TEXT NOT NULL, \
+            confusionmatrix TEXT,\
+            accuracy REAL,\
+            sensivity REAL,\
+            ppv REAL,\
+            npv REAL,\
+            f1score REAL,\
+            fpr REAL,\
+            tpr REAL, \
+            PRIMARY KEY (pipelineId, nodename)\
+        ) ")
+
+        # Create FederatedDataset table
+        cursor.execute("CREATE TABLE FedDatasets (\
+            FedId INTEGER PRIMARY KEY AUTOINCREMENT,\
+            FLsetupId INTEGER,\
+            FLpipeId INTEGER,\
+            name TEXT NOT NULL\
+        )")
+
+        # Commit and close the cursor
+        conn.commit()
+        cursor.close()
+        conn.close()
+
+    except sqlite3.Error as e:
+        print(f"Error: {e}")
+
+if __name__ == "__main__":
+    if len(sys.argv) != 2:
+        print("Usage: python script.py <path_to_csv_file>")
+        sys.exit(1)
+    csv_file_path = sys.argv[1]
+    main(csv_file_path)
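Per its usage message, the script is run as python create_db.py <path_to_csv_file>. A hypothetical sanity check that the schema landed, assuming the configured SQLite path is medfl.db:

import sqlite3

# The path is an assumption; use the database path from your MEDfl db config.
conn = sqlite3.connect("medfl.db")
tables = conn.execute(
    "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
).fetchall()
print(sorted(t[0] for t in tables))
# Expected: ['DataSets', 'FLpipeline', 'FLsetup', 'FedDatasets', 'Networks', 'Nodes', 'testResults']
conn.close()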
alembic/env.py
CHANGED
@@ -1,61 +1,61 @@
[all 61 lines are removed and re-added with identical text, which usually indicates a whitespace or line-ending change; the file body is shown once as context]
 from logging.config import fileConfig
 import logging
 from sqlalchemy import engine_from_config, create_engine
 from sqlalchemy import pool
 import sys
 import os
 from alembic import context

 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 from scripts.base import my_eng

 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.
 config = context.config

 # Interpret the config file for Python logging.
 # This line sets up loggers basically.
 fileConfig(config.config_file_name)

 # add your model's MetaData object here
 # for 'autogenerate' support
 # from myapp import mymodel
 # target_metadata = mymodel.Base.metadata
 target_metadata = None

 # other values from the config, defined by the needs of env.py,
 # can be acquired:
 # my_important_option = config.get_main_option("my_important_option")
 # ... etc.
 def configure_logger(name):
     # This is the standard logging configuration
     logging.config.fileConfig(
         'alembic_logging.ini',  # Path to your logging configuration file
         defaults={
             'logfilename': 'alembic.log',  # Log file name
         },
         disable_existing_loggers=False,
     )

     return logging.getLogger(name)


 def run_migrations_offline():
     """Run migrations in 'offline' mode."""
     pass

 def run_migrations_online():
     """Run migrations in 'online' mode."""
     pass

 if context.is_offline_mode():
     run_migrations_offline()
 else:
     run_migrations_online()


 if context.is_offline_mode():
     run_migrations_offline()
 else:
     run_migrations_online()