MEDfl 0.1.31__py3-none-any.whl → 0.1.33__py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (56)
  1. {MEDfl-0.1.31.dist-info → MEDfl-0.1.33.dist-info}/METADATA +127 -128
  2. MEDfl-0.1.33.dist-info/RECORD +34 -0
  3. {MEDfl-0.1.31.dist-info → MEDfl-0.1.33.dist-info}/WHEEL +1 -1
  4. {MEDfl-0.1.31.dist-info → MEDfl-0.1.33.dist-info}/top_level.txt +0 -1
  5. Medfl/LearningManager/__init__.py +13 -13
  6. Medfl/LearningManager/client.py +150 -150
  7. Medfl/LearningManager/dynamicModal.py +287 -287
  8. Medfl/LearningManager/federated_dataset.py +60 -60
  9. Medfl/LearningManager/flpipeline.py +192 -192
  10. Medfl/LearningManager/model.py +223 -223
  11. Medfl/LearningManager/params.yaml +14 -14
  12. Medfl/LearningManager/params_optimiser.py +442 -442
  13. Medfl/LearningManager/plot.py +229 -229
  14. Medfl/LearningManager/server.py +181 -181
  15. Medfl/LearningManager/strategy.py +82 -82
  16. Medfl/LearningManager/utils.py +331 -308
  17. Medfl/NetManager/__init__.py +10 -9
  18. Medfl/NetManager/database_connector.py +43 -48
  19. Medfl/NetManager/dataset.py +92 -92
  20. Medfl/NetManager/flsetup.py +320 -320
  21. Medfl/NetManager/net_helper.py +254 -248
  22. Medfl/NetManager/net_manager_queries.py +142 -137
  23. Medfl/NetManager/network.py +194 -174
  24. Medfl/NetManager/node.py +184 -178
  25. Medfl/__init__.py +3 -2
  26. Medfl/scripts/__init__.py +2 -0
  27. Medfl/scripts/base.py +30 -0
  28. Medfl/scripts/create_db.py +126 -0
  29. alembic/env.py +61 -61
  30. scripts/base.py +29 -29
  31. scripts/config.ini +5 -5
  32. scripts/create_db.py +133 -133
  33. MEDfl/LearningManager/__init__.py +0 -13
  34. MEDfl/LearningManager/client.py +0 -150
  35. MEDfl/LearningManager/dynamicModal.py +0 -287
  36. MEDfl/LearningManager/federated_dataset.py +0 -60
  37. MEDfl/LearningManager/flpipeline.py +0 -192
  38. MEDfl/LearningManager/model.py +0 -223
  39. MEDfl/LearningManager/params.yaml +0 -14
  40. MEDfl/LearningManager/params_optimiser.py +0 -442
  41. MEDfl/LearningManager/plot.py +0 -229
  42. MEDfl/LearningManager/server.py +0 -181
  43. MEDfl/LearningManager/strategy.py +0 -82
  44. MEDfl/LearningManager/utils.py +0 -333
  45. MEDfl/NetManager/__init__.py +0 -9
  46. MEDfl/NetManager/database_connector.py +0 -48
  47. MEDfl/NetManager/dataset.py +0 -92
  48. MEDfl/NetManager/flsetup.py +0 -320
  49. MEDfl/NetManager/net_helper.py +0 -248
  50. MEDfl/NetManager/net_manager_queries.py +0 -137
  51. MEDfl/NetManager/network.py +0 -174
  52. MEDfl/NetManager/node.py +0 -178
  53. MEDfl/__init__.py +0 -2
  54. MEDfl-0.1.31.data/scripts/setup_mysql.sh +0 -22
  55. MEDfl-0.1.31.dist-info/RECORD +0 -54
  56. scripts/db_config.ini +0 -6
Medfl/NetManager/net_manager_queries.py
@@ -1,137 +1,142 @@
- from .net_helper import is_str
-
- INSERT_DATASET = """
- INSERT INTO DataSets(DataSetName, NodeId, {columns})
- VALUES (:name, :NodeId, {values})
- """
- DELETE_DATASET = """
- DELETE FROM DataSets WHERE DataSetName = :name
- """
-
- SELECT_ALL_DATASET_NAMES = """
- SELECT DISTINCT DataSetName,NodeId FROM DataSets
- """
-
- SELECT_DATASET_BY_NAME = """
- SELECT * FROM DataSets WHERE DataSetName = :name
- """
-
- # node queries
- # sql_queries.py
-
- INSERT_NODE_QUERY = (
- "INSERT INTO Nodes(NodeName,NetId,train) VALUES ('{}',{}, {})"
- )
- DELETE_NODE_QUERY = "DELETE FROM Nodes WHERE NodeName = '{}'"
- SELECT_MASTER_COLUMNS_QUERY = "SELECT * FROM MasterDataset LIMIT 1"
- SELECT_DATASET_BY_COLUMN_QUERY = "SELECT * FROM MasterDataset WHERE {} = '{}'"
- SELECT_DATASET_BY_NODE_ID_QUERY = "SELECT * FROM DataSets WHERE NodeId = {}"
-
- SELECT_ALL_DATASETS_QUERY = "SELECT DISTINCT DataSetName,NodeName FROM DataSets,Nodes WHERE Nodes.NodeName = '{}' and Nodes.NodeId = DataSets.NodeId"
- SELECT_ALL_NODES_QUERY = "SELECT * FROM Nodes"
-
-
- # SQL query to insert a new network
- INSERT_NETWORK_QUERY = "INSERT INTO Networks(NetName) VALUES ('{name}')"
-
- # SQL query to delete a network
- DELETE_NETWORK_QUERY = "DELETE FROM Networks WHERE NetName = '{name}'"
-
- # SQL query to delete a network
- GET_NETWORK_QUERY = "SELECT * FROM Networks WHERE NetName = '{name}'"
-
-
- # SQL query to update a network
- UPDATE_NETWORK_QUERY = (
- "UPDATE Networks SET FLsetupId = {FLsetupId} WHERE NetId = {id}"
- )
-
- # SQL query to retrieve all nodes for a network
- LIST_ALL_NODES_QUERY = "SELECT Nodes.NodeName, Networks.NetName FROM Nodes, Networks WHERE Networks.NetName = '{name}' AND Networks.NetId = Nodes.NetId"
-
- # SQL query to create the MasterDataset table
- CREATE_MASTER_DATASET_TABLE_QUERY = """
- CREATE TABLE IF NOT EXISTS MasterDataset (
- PatientId INT NOT NULL AUTO_INCREMENT,
- {},
- PRIMARY KEY (PatientId)
- );
- """
-
- # SQL query to create the datasets table
- CREATE_DATASETS_TABLE_QUERY = """
- CREATE TABLE IF NOT EXISTS Datasets (
- DataSetId INT NOT NULL AUTO_INCREMENT,
- DataSetName VARCHAR(255),
- NodeId INT,
- {},
- PRIMARY KEY (DataSetId)
-
- );
- """
-
- # SQL query to insert dataset values
- INSERT_DATASET_VALUES_QUERY = "INSERT INTO MasterDataset({columns}, NodeId) VALUES ('{name}', {nodeId}, {values})"
-
-
- # FL setup_queries
- # sql_queries.py
-
- CREATE_FLSETUP_QUERY = """
- INSERT INTO FLsetup (name, description, creation_date, NetId, column_name)
- VALUES (:name, :description, :creation_date, :net_id, :column_name)
- """
-
- DELETE_FLSETUP_QUERY = """
- DELETE FROM FLsetup
- WHERE name = :name
- """
-
- UPDATE_FLSETUP_QUERY = UPDATE_NETWORK_QUERY = (
- "UPDATE FLsetup SET column_name ='{column_name}' WHERE name ='{FLsetupName}'"
- )
-
-
- READ_SETUP_QUERY = """
- SELECT * FROM FLsetup
- WHERE FLsetupId = :flsetup_id
- """
-
- READ_ALL_SETUPS_QUERY = """
- SELECT * FROM FLsetup
- """
-
- READ_NETWORK_BY_ID_QUERY = """
- SELECT * FROM Networks
- WHERE NetId = :net_id
- """
-
- READ_DISTINCT_NODES_QUERY = """
- SELECT DISTINCT {} FROM MasterDataset
- """
-
-
- # FederatedDataset Queries
- INSERT_FLDATASET_QUERY = (
- "INSERT INTO FedDatasets(name, FLsetupId) VALUES (:name, :FLsetupId)"
- )
- DELETE_FLDATASET_BY_SETUP_AND_PIPELINE_QUERY = "DELETE FROM FedDatasets WHERE FLsetupId = :FLsetupId AND FLpipeId = :FLpipeId"
-
-
- UPDATE_FLDATASET_QUERY = (
- "UPDATE FedDatasets SET FLpipeId = :FLpipeId WHERE FedId = :FedId"
- )
- SELECT_FLDATASET_BY_NAME_QUERY = "SELECT * FROM FedDatasets WHERE name = :name"
-
- CREATE_FLPIPELINE_QUERY = """
- INSERT INTO FLpipeline (name, description, creation_date, results)
- VALUES ('{name}', '{description}', '{creation_date}', '{result}')
- """
- DELETE_FLPIPELINE_QUERY = "DELETE FROM FLpipeline WHERE name = '{name}'"
-
- SELECT_FLPIPELINE_QUERY = "SELECT FROM FLpipeline WHERE name = '{name}'"
-
- CREATE_TEST_RESULTS_QUERY = """
- INSERT INTO testResults (pipelineid, nodename, confusionmatrix, accuracy , sensivity, ppv , npv , f1score , fpr , tpr )
- VALUES ('{pipelineId}', '{nodeName}', '{confusion_matrix}', '{accuracy}' , '{sensivity}' , '{ppv}' , '{npv}' , '{f1score}' , '{fpr}' , '{tpr}')
- """
+ from .net_helper import is_str
+
+ INSERT_DATASET = """
+ INSERT INTO DataSets(DataSetName, NodeId, {columns})
+ VALUES (:name, :NodeId, {values})
+ """
+ DELETE_DATASET = """
+ DELETE FROM DataSets WHERE DataSetName = :name
+ """
+
+ SELECT_ALL_DATASET_NAMES = """
+ SELECT DISTINCT DataSetName,NodeId FROM DataSets
+ """
+
+ SELECT_DATASET_BY_NAME = """
+ SELECT * FROM DataSets WHERE DataSetName = :name
+ """
+
+ # node queries
+ # sql_queries.py
+
+ INSERT_NODE_QUERY = (
+ "INSERT INTO Nodes(NodeName,NetId,train) VALUES ('{}',{}, {})"
+ )
+ DELETE_NODE_QUERY = "DELETE FROM Nodes WHERE NodeName = '{}'"
+ SELECT_MASTER_COLUMNS_QUERY = "SELECT * FROM MasterDataset LIMIT 1"
+ SELECT_DATASET_BY_COLUMN_QUERY = "SELECT * FROM MasterDataset WHERE {} = '{}'"
+ SELECT_DATASET_BY_NODE_ID_QUERY = "SELECT * FROM DataSets WHERE NodeId = {}"
+
+ SELECT_ALL_DATASETS_QUERY = "SELECT DISTINCT DataSetName,NodeName FROM DataSets,Nodes WHERE Nodes.NodeName = '{}' and Nodes.NodeId = DataSets.NodeId"
+ SELECT_ALL_NODES_QUERY = "SELECT * FROM Nodes"
+
+
+ # SQL query to insert a new network
+ INSERT_NETWORK_QUERY = "INSERT INTO Networks(NetName) VALUES (:name)"
+
+ # SQL query to delete a network
+ DELETE_NETWORK_QUERY = "DELETE FROM Networks WHERE NetName = '{name}'"
+
+ # SQL query to delete a network
+ GET_NETWORK_QUERY = "SELECT * FROM Networks WHERE NetName = '{name}'"
+
+
+ # SQL query to update a network
+ UPDATE_NETWORK_QUERY = (
+ "UPDATE Networks SET FLsetupId = {FLsetupId} WHERE NetId = {id}"
+ )
+
+ # SQL query to retrieve all nodes for a network
+ LIST_ALL_NODES_QUERY = """
+ SELECT Nodes.NodeName, Networks.NetName
+ FROM Nodes
+ JOIN Networks ON Networks.NetId = Nodes.NetId
+ WHERE Networks.NetName = :name
+ """
+
+ # SQL query to create the MasterDataset table (SQLite-compatible)
+ CREATE_MASTER_DATASET_TABLE_QUERY = """
+ CREATE TABLE IF NOT EXISTS MasterDataset (
+ PatientId INTEGER PRIMARY KEY AUTOINCREMENT,
+ {}
+ );
+ """
+
+
+ # SQL query to create the datasets table (SQLite-compatible)
+ CREATE_DATASETS_TABLE_QUERY = """
+ CREATE TABLE IF NOT EXISTS Datasets (
+ DataSetId INTEGER PRIMARY KEY AUTOINCREMENT,
+ DataSetName VARCHAR(255),
+ NodeId INT,
+ {}
+ );
+ """
+
+
+
+ # SQL query to insert dataset values
+ INSERT_DATASET_VALUES_QUERY = "INSERT INTO MasterDataset({columns}, NodeId) VALUES ('{name}', {nodeId}, {values})"
+
+
+ # FL setup_queries
+ # sql_queries.py
+
+ CREATE_FLSETUP_QUERY = """
+ INSERT INTO FLsetup (name, description, creation_date, NetId, column_name)
+ VALUES (:name, :description, :creation_date, :net_id, :column_name)
+ """
+
+ DELETE_FLSETUP_QUERY = """
+ DELETE FROM FLsetup
+ WHERE name = :name
+ """
+
+ UPDATE_FLSETUP_QUERY = UPDATE_NETWORK_QUERY = (
+ "UPDATE FLsetup SET column_name ='{column_name}' WHERE name ='{FLsetupName}'"
+ )
+
+
+ READ_SETUP_QUERY = """
+ SELECT * FROM FLsetup
+ WHERE FLsetupId = :flsetup_id
+ """
+
+ READ_ALL_SETUPS_QUERY = """
+ SELECT * FROM FLsetup
+ """
+
+ READ_NETWORK_BY_ID_QUERY = """
+ SELECT * FROM Networks
+ WHERE NetId = :net_id
+ """
+
+ READ_DISTINCT_NODES_QUERY = """
+ SELECT DISTINCT {} FROM MasterDataset
+ """
+
+
+ # FederatedDataset Queries
+ INSERT_FLDATASET_QUERY = (
+ "INSERT INTO FedDatasets(name, FLsetupId) VALUES (:name, :FLsetupId)"
+ )
+ DELETE_FLDATASET_BY_SETUP_AND_PIPELINE_QUERY = "DELETE FROM FedDatasets WHERE FLsetupId = :FLsetupId AND FLpipeId = :FLpipeId"
+
+
+ UPDATE_FLDATASET_QUERY = (
+ "UPDATE FedDatasets SET FLpipeId = :FLpipeId WHERE FedId = :FedId"
+ )
+ SELECT_FLDATASET_BY_NAME_QUERY = "SELECT * FROM FedDatasets WHERE name = :name"
+
+ CREATE_FLPIPELINE_QUERY = """
+ INSERT INTO FLpipeline (name, description, creation_date, results)
+ VALUES ('{name}', '{description}', '{creation_date}', '{result}')
+ """
+ DELETE_FLPIPELINE_QUERY = "DELETE FROM FLpipeline WHERE name = '{name}'"
+
+ SELECT_FLPIPELINE_QUERY = "SELECT FROM FLpipeline WHERE name = '{name}'"
+
+ CREATE_TEST_RESULTS_QUERY = """
+ INSERT INTO testResults (pipelineid, nodename, confusionmatrix, accuracy , sensivity, ppv , npv , f1score , fpr , tpr )
+ VALUES ('{pipelineId}', '{nodeName}', '{confusion_matrix}', '{accuracy}' , '{sensivity}' , '{ppv}' , '{npv}' , '{f1score}' , '{fpr}' , '{tpr}')
+ """
Medfl/NetManager/network.py
@@ -1,174 +1,194 @@
- # src/MEDfl/NetManager/network.py
-
- from MEDfl.LearningManager.utils import *
-
- from .net_helper import *
- from .net_manager_queries import (CREATE_MASTER_DATASET_TABLE_QUERY,
- CREATE_DATASETS_TABLE_QUERY,
- DELETE_NETWORK_QUERY,
- INSERT_NETWORK_QUERY, LIST_ALL_NODES_QUERY,
- UPDATE_NETWORK_QUERY, GET_NETWORK_QUERY)
- from .node import Node
- import pandas as pd
- from MEDfl.LearningManager.utils import params
-
- from sqlalchemy import text
-
-
- class Network:
- """
- A class representing a network.
-
- Attributes:
- name (str): The name of the network.
- mtable_exists (int): An integer flag indicating whether the MasterDataset table exists (1) or not (0).
- """
-
- def __init__(self, name: str = ""):
- """
- Initialize a Network instance.
-
- Parameters:
- name (str): The name of the network.
- """
- self.name = name
- self.mtable_exists = int(master_table_exists())
- self.validate()
-
- db_manager = DatabaseManager() ;
- db_manager.connect() ;
- self.eng = db_manager.get_connection()
-
- def validate(self):
- """Validate name"""
-
- if not isinstance(self.name, str):
- raise TypeError("name argument must be a string")
-
- def create_network(self):
- """Create a new network in the database."""
- self.eng.execute(text(INSERT_NETWORK_QUERY.format(name=self.name)))
- self.id = get_netid_from_name(self.name)
-
- def use_network(self, network_name: str):
- """Use a network in the database.
-
- Parameters:
- network_name (str): The name of the network to use.
-
- Returns:
- Network or None: An instance of the Network class if the network exists, else None.
-
- """
- network = pd.read_sql(
- text(GET_NETWORK_QUERY.format(name=network_name)),
- self.eng,
- )
-
- if (network.NetId[0]):
- self.name = network.NetName[0]
- self.id = network.NetId[0]
- self.mtable_exists = int(master_table_exists())
- self.validate()
- return self
- else:
- return None
-
- def delete_network(self):
- """Delete the network from the database."""
- self.eng.execute(text(DELETE_NETWORK_QUERY.format(name=self.name)))
-
- def update_network(self, FLsetupId: int):
- """Update the network's FLsetupId in the database.
-
- Parameters:
- FLsetupId (int): The FLsetupId to update.
- """
- self.eng.execute(
- text(UPDATE_NETWORK_QUERY.format(FLsetupId=FLsetupId, id=self.id))
- )
-
- def add_node(self, node: Node):
- """Add a node to the network.
-
- Parameters:
- node (Node): The node to add.
- """
- node.create_node(self.id)
-
- def list_allnodes(self):
- """List all nodes in the network.
-
- Parameters:
- None
-
- Returns:
- DataFrame: A DataFrame containing information about all nodes in the network.
-
- """
- query = text(LIST_ALL_NODES_QUERY.format(name=self.name))
- result_proxy = self.eng.execute(query)
- result_df = pd.DataFrame(result_proxy.fetchall(), columns=result_proxy.keys())
- return result_df
-
- def create_master_dataset(self, path_to_csv: str = params['path_to_master_csv']):
- """
- Create the MasterDataset table and insert dataset values.
-
- :param path_to_csv: Path to the CSV file containing the dataset.
- """
- print(path_to_csv)
- # Read the CSV file into a Pandas DataFrame
- data_df = pd.read_csv(path_to_csv)
-
- # Process the data if needed (e.g., handle missing values, encode categorical variables)
- # ...
-
- # Check if the MasterDataset table exists
-
- if self.mtable_exists != 1:
- columns = data_df.columns.tolist()
- columns_str = ",\n".join(
- [
- f"{col} {column_map[str(data_df[col].dtype)]}"
- for col in columns
- ]
- )
- self.eng.execute(
- text(CREATE_MASTER_DATASET_TABLE_QUERY.format(columns_str))
- )
- self.eng.execute(text(CREATE_DATASETS_TABLE_QUERY.format(columns_str)))
-
- # Get the list of columns in the DataFrame
-
- data_df = process_eicu(data_df)
- # Insert the dataset values into the MasterDataset table
-
- for index, row in data_df.iterrows():
- query_1 = "INSERT INTO MasterDataset(" + "".join(
- f"{x}," for x in columns
- )
- query_2 = f"VALUES (" + "".join(
- f"{is_str(data_df, row, x)}," for x in columns
- )
- query = query_1[:-1] + ")" + query_2[:-1] + ")"
- self.eng.execute(text(query))
-
- # Set mtable_exists flag to True
- self.mtable_exists = 1
-
- @staticmethod
- def list_allnetworks():
- """List all networks in the database.
- Returns:
- DataFrame: A DataFrame containing information about all networks in the database.
-
- """
- db_manager = DatabaseManager() ;
- db_manager.connect() ;
- my_eng = db_manager.get_connection() ;
-
- result_proxy = my_eng.execute("SELECT * FROM Networks")
- result = result_proxy.fetchall()
- return result
-
+ # src/MEDfl/NetManager/network.py
+
+ from MEDfl.LearningManager.utils import *
+ from .net_helper import *
+ from .net_manager_queries import (CREATE_MASTER_DATASET_TABLE_QUERY,
+ CREATE_DATASETS_TABLE_QUERY,
+ DELETE_NETWORK_QUERY,
+ INSERT_NETWORK_QUERY, LIST_ALL_NODES_QUERY,
+ UPDATE_NETWORK_QUERY, GET_NETWORK_QUERY)
+ from .node import Node
+ import pandas as pd
+ from MEDfl.LearningManager.utils import params
+
+ from sqlalchemy import text
+ from sqlalchemy.exc import SQLAlchemyError
+
+ class Network:
+ """
+ A class representing a network.
+
+ Attributes:
+ name (str): The name of the network.
+ mtable_exists (int): An integer flag indicating whether the MasterDataset table exists (1) or not (0).
+ """
+
+ def __init__(self, name: str = ""):
+ """
+ Initialize a Network instance.
+
+ Parameters:
+ name (str): The name of the network.
+ """
+ self.name = name
+ self.mtable_exists = int(master_table_exists())
+ self.validate()
+
+ db_manager = DatabaseManager()
+ db_manager.connect()
+ self.eng = db_manager.get_connection()
+
+ def validate(self):
+ """Validate name"""
+
+ if not isinstance(self.name, str):
+ raise TypeError("name argument must be a string")
+
+ def create_network(self):
+ """Create a new network in the database."""
+ try:
+ print(self.name)
+ self.eng.execute(text(INSERT_NETWORK_QUERY), {"name": self.name})
+ self.id = self.get_netid_from_name(self.name)
+ except SQLAlchemyError as e:
+ print(f"Error creating network: {e}")
+
+ def use_network(self, network_name: str):
+ """Use a network in the database.
+
+ Parameters:
+ network_name (str): The name of the network to use.
+
+ Returns:
+ Network or None: An instance of the Network class if the network exists, else None.
+ """
+ try:
+ network = pd.read_sql(
+ text(GET_NETWORK_QUERY),
+ self.eng,
+ params={"name": network_name}
+ )
+ if not network.empty:
+ self.name = network.iloc[0]['NetName']
+ self.id = network.iloc[0]['NetId']
+ self.mtable_exists = int(master_table_exists())
+ self.validate()
+ return self
+ else:
+ return None
+ except SQLAlchemyError as e:
+ print(f"Error using network: {e}")
+ return None
+
+ def delete_network(self):
+ """Delete the network from the database."""
+ try:
+ self.eng.execute(text(DELETE_NETWORK_QUERY), {"name": self.name})
+ except SQLAlchemyError as e:
+ print(f"Error deleting network: {e}")
+
+ def update_network(self, FLsetupId: int):
+ """Update the network's FLsetupId in the database.
+
+ Parameters:
+ FLsetupId (int): The FLsetupId to update.
+ """
+ try:
+ self.eng.execute(
+ text(UPDATE_NETWORK_QUERY),
+ {"FLsetupId": FLsetupId, "id": self.id}
+ )
+ except SQLAlchemyError as e:
+ print(f"Error updating network: {e}")
+
+ def add_node(self, node: Node):
+ """Add a node to the network.
+
+ Parameters:
+ node (Node): The node to add.
+ """
+ node.create_node(self.id)
+
+ def list_allnodes(self):
+ """List all nodes in the network.
+
+ Returns:
+ DataFrame: A DataFrame containing information about all nodes in the network.
+ """
+ try:
+ query = text(LIST_ALL_NODES_QUERY)
+ result_proxy = self.eng.execute(query, name=self.name)
+ result_df = pd.DataFrame(result_proxy.fetchall(), columns=result_proxy.keys())
+ return result_df
+ except SQLAlchemyError as e:
+ print(f"Error listing all nodes: {e}")
+ return pd.DataFrame()
+
+ def create_master_dataset(self, path_to_csv: str = params['path_to_master_csv']):
+ """
+ Create the MasterDataset table and insert dataset values.
+
+ :param path_to_csv: Path to the CSV file containing the dataset.
+ """
+ try:
+ print(path_to_csv)
+ data_df = pd.read_csv(path_to_csv)
+
+ if self.mtable_exists != 1:
+ columns = data_df.columns.tolist()
+ columns_str = ",\n".join(
+ [
+ f"{col} {column_map[str(data_df[col].dtype)]}"
+ for col in columns
+ ]
+ )
+ self.eng.execute(
+ text(CREATE_MASTER_DATASET_TABLE_QUERY.format(columns_str))
+ )
+ self.eng.execute(text(CREATE_DATASETS_TABLE_QUERY.format(columns_str)))
+
+ # Process data
+ data_df = process_eicu(data_df)
+
+ # Insert data in batches
+ batch_size = 1000 # Adjust as needed
+ for start_idx in range(0, len(data_df), batch_size):
+ batch_data = data_df.iloc[start_idx:start_idx + batch_size]
+ insert_query = f"INSERT INTO MasterDataset ({', '.join(columns)}) VALUES ({', '.join([':' + col for col in columns])})"
+ data_to_insert = batch_data.to_dict(orient='records')
+ self.eng.execute(text(insert_query), data_to_insert)
+
+ self.mtable_exists = 1
+ except SQLAlchemyError as e:
+ print(f"Error creating master dataset: {e}")
+
+ @staticmethod
+ def list_allnetworks():
+ """List all networks in the database.
+
+ Returns:
+ DataFrame: A DataFrame containing information about all networks in the database.
+ """
+ try:
+ db_manager = DatabaseManager()
+ db_manager.connect()
+ my_eng = db_manager.get_connection()
+
+ result_proxy = my_eng.execute("SELECT * FROM Networks")
+ result = result_proxy.fetchall()
+ return pd.DataFrame(result, columns=result_proxy.keys())
+ except SQLAlchemyError as e:
+ print(f"Error listing all networks: {e}")
+ return pd.DataFrame()
+
+ def get_netid_from_name(self, name):
+ """Get network ID from network name."""
+ try:
+ result = self.eng.execute(text("SELECT NetId FROM Networks WHERE NetName = :name"), {"name": name}).fetchone()
+ if result:
+ return result[0]
+ else:
+ return None
+ except SQLAlchemyError as e:
+ print(f"Error fetching network ID: {e}")
+ return None
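
In this hunk, create_master_dataset() replaces the old row-by-row INSERT loop with a single parameterized statement executed once per batch against a list of row dicts (an executemany), and the other methods gain bound parameters plus SQLAlchemyError handling. A rough, self-contained sketch of that batched-insert pattern, assuming an in-memory SQLite engine and invented columns (age, sofa) rather than the package's actual MasterDataset schema:

```python
# Hypothetical sketch of the batched executemany-style insert; columns and data are invented.
import pandas as pd
from sqlalchemy import create_engine, text

engine = create_engine("sqlite:///:memory:")  # placeholder engine URL
data_df = pd.DataFrame({"age": [63, 71, 58], "sofa": [4, 7, 2]})
columns = data_df.columns.tolist()

# One parameterized INSERT reused for every batch.
insert_query = (
    f"INSERT INTO MasterDataset ({', '.join(columns)}) "
    f"VALUES ({', '.join(':' + col for col in columns)})"
)

batch_size = 2  # the package default shown above is 1000
with engine.begin() as conn:
    conn.execute(text(
        "CREATE TABLE MasterDataset ("
        "PatientId INTEGER PRIMARY KEY AUTOINCREMENT, age INT, sofa INT)"
    ))
    for start_idx in range(0, len(data_df), batch_size):
        batch = data_df.iloc[start_idx:start_idx + batch_size]
        # Passing a list of dicts makes SQLAlchemy run an executemany for the batch.
        conn.execute(text(insert_query), batch.to_dict(orient="records"))
    print(conn.execute(text("SELECT COUNT(*) FROM MasterDataset")).scalar())  # 3
```

Compared with issuing one INSERT per DataFrame row, one statement per batch keeps the round-trip count low while still parameterizing every value.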