MEDfl 0.2.1-py3-none-any.whl → 2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- MEDfl/LearningManager/__init__.py +13 -13
- MEDfl/LearningManager/client.py +150 -181
- MEDfl/LearningManager/dynamicModal.py +287 -287
- MEDfl/LearningManager/federated_dataset.py +60 -60
- MEDfl/LearningManager/flpipeline.py +192 -192
- MEDfl/LearningManager/model.py +223 -223
- MEDfl/LearningManager/params.yaml +14 -14
- MEDfl/LearningManager/params_optimiser.py +442 -442
- MEDfl/LearningManager/plot.py +229 -229
- MEDfl/LearningManager/server.py +181 -189
- MEDfl/LearningManager/strategy.py +82 -138
- MEDfl/LearningManager/utils.py +331 -331
- MEDfl/NetManager/__init__.py +10 -10
- MEDfl/NetManager/database_connector.py +43 -43
- MEDfl/NetManager/dataset.py +92 -92
- MEDfl/NetManager/flsetup.py +320 -320
- MEDfl/NetManager/net_helper.py +254 -254
- MEDfl/NetManager/net_manager_queries.py +142 -142
- MEDfl/NetManager/network.py +194 -194
- MEDfl/NetManager/node.py +184 -184
- MEDfl/__init__.py +2 -2
- MEDfl/scripts/__init__.py +1 -1
- MEDfl/scripts/base.py +29 -29
- MEDfl/scripts/create_db.py +126 -126
- Medfl/LearningManager/__init__.py +13 -0
- Medfl/LearningManager/client.py +150 -0
- Medfl/LearningManager/dynamicModal.py +287 -0
- Medfl/LearningManager/federated_dataset.py +60 -0
- Medfl/LearningManager/flpipeline.py +192 -0
- Medfl/LearningManager/model.py +223 -0
- Medfl/LearningManager/params.yaml +14 -0
- Medfl/LearningManager/params_optimiser.py +442 -0
- Medfl/LearningManager/plot.py +229 -0
- Medfl/LearningManager/server.py +181 -0
- Medfl/LearningManager/strategy.py +82 -0
- Medfl/LearningManager/utils.py +331 -0
- Medfl/NetManager/__init__.py +10 -0
- Medfl/NetManager/database_connector.py +43 -0
- Medfl/NetManager/dataset.py +92 -0
- Medfl/NetManager/flsetup.py +320 -0
- Medfl/NetManager/net_helper.py +254 -0
- Medfl/NetManager/net_manager_queries.py +142 -0
- Medfl/NetManager/network.py +194 -0
- Medfl/NetManager/node.py +184 -0
- Medfl/__init__.py +3 -0
- Medfl/scripts/__init__.py +2 -0
- Medfl/scripts/base.py +30 -0
- Medfl/scripts/create_db.py +126 -0
- alembic/env.py +61 -61
- {MEDfl-0.2.1.dist-info → medfl-2.0.0.dist-info}/METADATA +120 -108
- medfl-2.0.0.dist-info/RECORD +55 -0
- {MEDfl-0.2.1.dist-info → medfl-2.0.0.dist-info}/WHEEL +1 -1
- {MEDfl-0.2.1.dist-info → medfl-2.0.0.dist-info/licenses}/LICENSE +674 -674
- MEDfl-0.2.1.dist-info/RECORD +0 -31
- {MEDfl-0.2.1.dist-info → medfl-2.0.0.dist-info}/top_level.txt +0 -0
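The headline change in the list above is that the 2.0.0 wheel ships a second top-level package directory, `Medfl/…`, alongside the original `MEDfl/…` (every `Medfl` entry is added with `+N -0`), while the original `MEDfl` modules are rewritten line-for-line. A minimal sketch of what that duplication means for downstream imports, assuming both directories install as listed; the fallback pattern is illustrative, not something the package documents:

```python
# Illustrative only: the 2.0.0 wheel lists both "MEDfl" and "Medfl" top-level
# packages, so either import path should resolve once the wheel is installed.
try:
    from MEDfl.NetManager.network import Network   # spelling used in 0.2.1 and kept in 2.0.0
except ImportError:
    from Medfl.NetManager.network import Network   # duplicate package tree added in 2.0.0

print(Network.__module__)  # shows which copy was actually imported
```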
MEDfl/NetManager/net_manager_queries.py
CHANGED
@@ -1,142 +1,142 @@
Every line of the file is marked as removed and re-added, but the removed and added text is identical, so the file's 142 lines are shown once:

```python
from .net_helper import is_str

INSERT_DATASET = """
    INSERT INTO DataSets(DataSetName, NodeId, {columns})
    VALUES (:name, :NodeId, {values})
"""
DELETE_DATASET = """
    DELETE FROM DataSets WHERE DataSetName = :name
"""

SELECT_ALL_DATASET_NAMES = """
    SELECT DISTINCT DataSetName,NodeId FROM DataSets
"""

SELECT_DATASET_BY_NAME = """
    SELECT * FROM DataSets WHERE DataSetName = :name
"""

# node queries
# sql_queries.py

INSERT_NODE_QUERY = (
    "INSERT INTO Nodes(NodeName,NetId,train) VALUES ('{}',{}, {})"
)
DELETE_NODE_QUERY = "DELETE FROM Nodes WHERE NodeName = '{}'"
SELECT_MASTER_COLUMNS_QUERY = "SELECT * FROM MasterDataset LIMIT 1"
SELECT_DATASET_BY_COLUMN_QUERY = "SELECT * FROM MasterDataset WHERE {} = '{}'"
SELECT_DATASET_BY_NODE_ID_QUERY = "SELECT * FROM DataSets WHERE NodeId = {}"

SELECT_ALL_DATASETS_QUERY = "SELECT DISTINCT DataSetName,NodeName FROM DataSets,Nodes WHERE Nodes.NodeName = '{}' and Nodes.NodeId = DataSets.NodeId"
SELECT_ALL_NODES_QUERY = "SELECT * FROM Nodes"


# SQL query to insert a new network
INSERT_NETWORK_QUERY = "INSERT INTO Networks(NetName) VALUES (:name)"

# SQL query to delete a network
DELETE_NETWORK_QUERY = "DELETE FROM Networks WHERE NetName = '{name}'"

# SQL query to delete a network
GET_NETWORK_QUERY = "SELECT * FROM Networks WHERE NetName = '{name}'"


# SQL query to update a network
UPDATE_NETWORK_QUERY = (
    "UPDATE Networks SET FLsetupId = {FLsetupId} WHERE NetId = {id}"
)

# SQL query to retrieve all nodes for a network
LIST_ALL_NODES_QUERY = """
    SELECT Nodes.NodeName, Networks.NetName
    FROM Nodes
    JOIN Networks ON Networks.NetId = Nodes.NetId
    WHERE Networks.NetName = :name
"""

# SQL query to create the MasterDataset table (SQLite-compatible)
CREATE_MASTER_DATASET_TABLE_QUERY = """
    CREATE TABLE IF NOT EXISTS MasterDataset (
        PatientId INTEGER PRIMARY KEY AUTOINCREMENT,
        {}
    );
"""


# SQL query to create the datasets table (SQLite-compatible)
CREATE_DATASETS_TABLE_QUERY = """
    CREATE TABLE IF NOT EXISTS Datasets (
        DataSetId INTEGER PRIMARY KEY AUTOINCREMENT,
        DataSetName VARCHAR(255),
        NodeId INT,
        {}
    );
"""



# SQL query to insert dataset values
INSERT_DATASET_VALUES_QUERY = "INSERT INTO MasterDataset({columns}, NodeId) VALUES ('{name}', {nodeId}, {values})"


# FL setup_queries
# sql_queries.py

CREATE_FLSETUP_QUERY = """
    INSERT INTO FLsetup (name, description, creation_date, NetId, column_name)
    VALUES (:name, :description, :creation_date, :net_id, :column_name)
"""

DELETE_FLSETUP_QUERY = """
    DELETE FROM FLsetup
    WHERE name = :name
"""

UPDATE_FLSETUP_QUERY = UPDATE_NETWORK_QUERY = (
    "UPDATE FLsetup SET column_name ='{column_name}' WHERE name ='{FLsetupName}'"
)


READ_SETUP_QUERY = """
    SELECT * FROM FLsetup
    WHERE FLsetupId = :flsetup_id
"""

READ_ALL_SETUPS_QUERY = """
    SELECT * FROM FLsetup
"""

READ_NETWORK_BY_ID_QUERY = """
    SELECT * FROM Networks
    WHERE NetId = :net_id
"""

READ_DISTINCT_NODES_QUERY = """
    SELECT DISTINCT {} FROM MasterDataset
"""


# FederatedDataset Queries
INSERT_FLDATASET_QUERY = (
    "INSERT INTO FedDatasets(name, FLsetupId) VALUES (:name, :FLsetupId)"
)
DELETE_FLDATASET_BY_SETUP_AND_PIPELINE_QUERY = "DELETE FROM FedDatasets WHERE FLsetupId = :FLsetupId AND FLpipeId = :FLpipeId"


UPDATE_FLDATASET_QUERY = (
    "UPDATE FedDatasets SET FLpipeId = :FLpipeId WHERE FedId = :FedId"
)
SELECT_FLDATASET_BY_NAME_QUERY = "SELECT * FROM FedDatasets WHERE name = :name"

CREATE_FLPIPELINE_QUERY = """
    INSERT INTO FLpipeline (name, description, creation_date, results)
    VALUES ('{name}', '{description}', '{creation_date}', '{result}')
"""
DELETE_FLPIPELINE_QUERY = "DELETE FROM FLpipeline WHERE name = '{name}'"

SELECT_FLPIPELINE_QUERY = "SELECT FROM FLpipeline WHERE name = '{name}'"

CREATE_TEST_RESULTS_QUERY = """
    INSERT INTO testResults (pipelineid, nodename, confusionmatrix, accuracy , sensivity, ppv , npv , f1score , fpr , tpr )
    VALUES ('{pipelineId}', '{nodeName}', '{confusion_matrix}', '{accuracy}' , '{sensivity}' , '{ppv}' , '{npv}' , '{f1score}' , '{fpr}' , '{tpr}')
"""
```
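Most of these constants mix Python `str.format` placeholders (for table and column fragments) with SQLAlchemy bound parameters such as `:name` and `:NodeId`. As a point of reference, `network.py` below executes them via `eng.execute(text(QUERY), {...})`; the following sketch applies the same pattern to `INSERT_DATASET`, with the engine setup, table creation, and column names chosen purely for illustration:

```python
from sqlalchemy import create_engine, text
from MEDfl.NetManager.net_manager_queries import INSERT_DATASET

# Illustrative connection; MEDfl normally obtains one through its DatabaseManager.
eng = create_engine("sqlite:///medfl_example.db").connect()

# Stand-in for the DataSets table that MEDfl's setup normally creates.
eng.execute(text(
    "CREATE TABLE IF NOT EXISTS DataSets ("
    "DataSetId INTEGER PRIMARY KEY AUTOINCREMENT, "
    "DataSetName VARCHAR(255), NodeId INT, age INT, gender VARCHAR(16))"
))

columns = ["age", "gender"]                          # hypothetical dataset columns
query = INSERT_DATASET.format(
    columns=", ".join(columns),                      # -> "age, gender"
    values=", ".join(f":{c}" for c in columns),      # -> ":age, :gender"
)
# The remaining placeholders are bound parameters filled in at execution time.
eng.execute(text(query), {"name": "site_a_dataset", "NodeId": 1, "age": 63, "gender": "F"})
```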
MEDfl/NetManager/network.py
CHANGED
@@ -1,194 +1,194 @@
As above, every line is marked removed and re-added with identical text, so the file's 194 lines are shown once:

```python
# src/MEDfl/NetManager/network.py

from MEDfl.LearningManager.utils import *
from .net_helper import *
from .net_manager_queries import (CREATE_MASTER_DATASET_TABLE_QUERY,
                                  CREATE_DATASETS_TABLE_QUERY,
                                  DELETE_NETWORK_QUERY,
                                  INSERT_NETWORK_QUERY, LIST_ALL_NODES_QUERY,
                                  UPDATE_NETWORK_QUERY, GET_NETWORK_QUERY)
from .node import Node
import pandas as pd
from MEDfl.LearningManager.utils import params

from sqlalchemy import text
from sqlalchemy.exc import SQLAlchemyError

class Network:
    """
    A class representing a network.

    Attributes:
        name (str): The name of the network.
        mtable_exists (int): An integer flag indicating whether the MasterDataset table exists (1) or not (0).
    """

    def __init__(self, name: str = ""):
        """
        Initialize a Network instance.

        Parameters:
            name (str): The name of the network.
        """
        self.name = name
        self.mtable_exists = int(master_table_exists())
        self.validate()

        db_manager = DatabaseManager()
        db_manager.connect()
        self.eng = db_manager.get_connection()

    def validate(self):
        """Validate name"""

        if not isinstance(self.name, str):
            raise TypeError("name argument must be a string")

    def create_network(self):
        """Create a new network in the database."""
        try:
            print(self.name)
            self.eng.execute(text(INSERT_NETWORK_QUERY), {"name": self.name})
            self.id = self.get_netid_from_name(self.name)
        except SQLAlchemyError as e:
            print(f"Error creating network: {e}")

    def use_network(self, network_name: str):
        """Use a network in the database.

        Parameters:
            network_name (str): The name of the network to use.

        Returns:
            Network or None: An instance of the Network class if the network exists, else None.
        """
        try:
            network = pd.read_sql(
                text(GET_NETWORK_QUERY),
                self.eng,
                params={"name": network_name}
            )
            if not network.empty:
                self.name = network.iloc[0]['NetName']
                self.id = network.iloc[0]['NetId']
                self.mtable_exists = int(master_table_exists())
                self.validate()
                return self
            else:
                return None
        except SQLAlchemyError as e:
            print(f"Error using network: {e}")
            return None

    def delete_network(self):
        """Delete the network from the database."""
        try:
            self.eng.execute(text(DELETE_NETWORK_QUERY), {"name": self.name})
        except SQLAlchemyError as e:
            print(f"Error deleting network: {e}")

    def update_network(self, FLsetupId: int):
        """Update the network's FLsetupId in the database.

        Parameters:
            FLsetupId (int): The FLsetupId to update.
        """
        try:
            self.eng.execute(
                text(UPDATE_NETWORK_QUERY),
                {"FLsetupId": FLsetupId, "id": self.id}
            )
        except SQLAlchemyError as e:
            print(f"Error updating network: {e}")

    def add_node(self, node: Node):
        """Add a node to the network.

        Parameters:
            node (Node): The node to add.
        """
        node.create_node(self.id)

    def list_allnodes(self):
        """List all nodes in the network.

        Returns:
            DataFrame: A DataFrame containing information about all nodes in the network.
        """
        try:
            query = text(LIST_ALL_NODES_QUERY)
            result_proxy = self.eng.execute(query, name=self.name)
            result_df = pd.DataFrame(result_proxy.fetchall(), columns=result_proxy.keys())
            return result_df
        except SQLAlchemyError as e:
            print(f"Error listing all nodes: {e}")
            return pd.DataFrame()

    def create_master_dataset(self, path_to_csv: str = params['path_to_master_csv']):
        """
        Create the MasterDataset table and insert dataset values.

        :param path_to_csv: Path to the CSV file containing the dataset.
        """
        try:
            print(path_to_csv)
            data_df = pd.read_csv(path_to_csv)

            if self.mtable_exists != 1:
                columns = data_df.columns.tolist()
                columns_str = ",\n".join(
                    [
                        f"{col} {column_map[str(data_df[col].dtype)]}"
                        for col in columns
                    ]
                )
                self.eng.execute(
                    text(CREATE_MASTER_DATASET_TABLE_QUERY.format(columns_str))
                )
                self.eng.execute(text(CREATE_DATASETS_TABLE_QUERY.format(columns_str)))

            # Process data
            data_df = process_eicu(data_df)

            # Insert data in batches
            batch_size = 1000  # Adjust as needed
            for start_idx in range(0, len(data_df), batch_size):
                batch_data = data_df.iloc[start_idx:start_idx + batch_size]
                insert_query = f"INSERT INTO MasterDataset ({', '.join(columns)}) VALUES ({', '.join([':' + col for col in columns])})"
                data_to_insert = batch_data.to_dict(orient='records')
                self.eng.execute(text(insert_query), data_to_insert)

            self.mtable_exists = 1
        except SQLAlchemyError as e:
            print(f"Error creating master dataset: {e}")

    @staticmethod
    def list_allnetworks():
        """List all networks in the database.

        Returns:
            DataFrame: A DataFrame containing information about all networks in the database.
        """
        try:
            db_manager = DatabaseManager()
            db_manager.connect()
            my_eng = db_manager.get_connection()

            result_proxy = my_eng.execute("SELECT * FROM Networks")
            result = result_proxy.fetchall()
            return pd.DataFrame(result, columns=result_proxy.keys())
        except SQLAlchemyError as e:
            print(f"Error listing all networks: {e}")
            return pd.DataFrame()

    def get_netid_from_name(self, name):
        """Get network ID from network name."""
        try:
            result = self.eng.execute(text("SELECT NetId FROM Networks WHERE NetName = :name"), {"name": name}).fetchone()
            if result:
                return result[0]
            else:
                return None
        except SQLAlchemyError as e:
            print(f"Error fetching network ID: {e}")
            return None
```
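To close, a short usage sketch of the `Network` class as it appears on both sides of this diff. The CSV path and node arguments below are placeholders, and `Node`'s constructor is not shown in this section, so its signature is assumed from the `INSERT_NODE_QUERY` columns (`NodeName`, `NetId`, `train`):

```python
from MEDfl.NetManager.network import Network
from MEDfl.NetManager.node import Node

net = Network(name="hospital_consortium")          # validates the name, opens a DB connection
net.create_network()                               # runs INSERT_NETWORK_QUERY and caches net.id
net.create_master_dataset("/path/to/master.csv")   # placeholder path; defaults to params['path_to_master_csv']

node = Node(name="site_a", train=1)                # assumed signature, based on the Nodes columns
net.add_node(node)                                 # delegates to node.create_node(net.id)

print(net.list_allnodes())                         # DataFrame of NodeName / NetName pairs
print(Network.list_allnetworks())                  # DataFrame with every row of the Networks table
```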