MEDfl 0.2.1__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- MEDfl/LearningManager/__init__.py +13 -13
- MEDfl/LearningManager/client.py +150 -181
- MEDfl/LearningManager/dynamicModal.py +287 -287
- MEDfl/LearningManager/federated_dataset.py +60 -60
- MEDfl/LearningManager/flpipeline.py +192 -192
- MEDfl/LearningManager/model.py +223 -223
- MEDfl/LearningManager/params.yaml +14 -14
- MEDfl/LearningManager/params_optimiser.py +442 -442
- MEDfl/LearningManager/plot.py +229 -229
- MEDfl/LearningManager/server.py +181 -189
- MEDfl/LearningManager/strategy.py +82 -138
- MEDfl/LearningManager/utils.py +331 -331
- MEDfl/NetManager/__init__.py +10 -10
- MEDfl/NetManager/database_connector.py +43 -43
- MEDfl/NetManager/dataset.py +92 -92
- MEDfl/NetManager/flsetup.py +320 -320
- MEDfl/NetManager/net_helper.py +254 -254
- MEDfl/NetManager/net_manager_queries.py +142 -142
- MEDfl/NetManager/network.py +194 -194
- MEDfl/NetManager/node.py +184 -184
- MEDfl/__init__.py +2 -2
- MEDfl/scripts/__init__.py +1 -1
- MEDfl/scripts/base.py +29 -29
- MEDfl/scripts/create_db.py +126 -126
- Medfl/LearningManager/__init__.py +13 -0
- Medfl/LearningManager/client.py +150 -0
- Medfl/LearningManager/dynamicModal.py +287 -0
- Medfl/LearningManager/federated_dataset.py +60 -0
- Medfl/LearningManager/flpipeline.py +192 -0
- Medfl/LearningManager/model.py +223 -0
- Medfl/LearningManager/params.yaml +14 -0
- Medfl/LearningManager/params_optimiser.py +442 -0
- Medfl/LearningManager/plot.py +229 -0
- Medfl/LearningManager/server.py +181 -0
- Medfl/LearningManager/strategy.py +82 -0
- Medfl/LearningManager/utils.py +331 -0
- Medfl/NetManager/__init__.py +10 -0
- Medfl/NetManager/database_connector.py +43 -0
- Medfl/NetManager/dataset.py +92 -0
- Medfl/NetManager/flsetup.py +320 -0
- Medfl/NetManager/net_helper.py +254 -0
- Medfl/NetManager/net_manager_queries.py +142 -0
- Medfl/NetManager/network.py +194 -0
- Medfl/NetManager/node.py +184 -0
- Medfl/__init__.py +3 -0
- Medfl/scripts/__init__.py +2 -0
- Medfl/scripts/base.py +30 -0
- Medfl/scripts/create_db.py +126 -0
- alembic/env.py +61 -61
- {MEDfl-0.2.1.dist-info → medfl-2.0.0.dist-info}/METADATA +120 -108
- medfl-2.0.0.dist-info/RECORD +55 -0
- {MEDfl-0.2.1.dist-info → medfl-2.0.0.dist-info}/WHEEL +1 -1
- {MEDfl-0.2.1.dist-info → medfl-2.0.0.dist-info/licenses}/LICENSE +674 -674
- MEDfl-0.2.1.dist-info/RECORD +0 -31
- {MEDfl-0.2.1.dist-info → medfl-2.0.0.dist-info}/top_level.txt +0 -0
MEDfl/NetManager/__init__.py
CHANGED
@@ -1,10 +1,10 @@
|
|
1
|
-
# # MEDfl/NetworkManager/__init__.py
|
2
|
-
|
3
|
-
# # Import modules from this package
|
4
|
-
# from .dataset import *
|
5
|
-
# from .flsetup import *
|
6
|
-
# from .net_helper import *
|
7
|
-
# from .net_manager_queries import *
|
8
|
-
# from .network import *
|
9
|
-
# from .node import *
|
10
|
-
# from .database_connector import *
|
1
|
+
# # MEDfl/NetworkManager/__init__.py
|
2
|
+
|
3
|
+
# # Import modules from this package
|
4
|
+
# from .dataset import *
|
5
|
+
# from .flsetup import *
|
6
|
+
# from .net_helper import *
|
7
|
+
# from .net_manager_queries import *
|
8
|
+
# from .network import *
|
9
|
+
# from .node import *
|
10
|
+
# from .database_connector import *
|
@@ -1,43 +1,43 @@
|
|
1
|
-
import os
|
2
|
-
import subprocess
|
3
|
-
from sqlalchemy import create_engine
|
4
|
-
from configparser import ConfigParser
|
5
|
-
|
6
|
-
class DatabaseManager:
|
7
|
-
def __init__(self):
|
8
|
-
from MEDfl.LearningManager.utils import load_db_config
|
9
|
-
db_config = load_db_config()
|
10
|
-
if db_config:
|
11
|
-
self.config = db_config
|
12
|
-
else:
|
13
|
-
self.config = None
|
14
|
-
self.engine = None
|
15
|
-
|
16
|
-
def connect(self):
|
17
|
-
if not self.config:
|
18
|
-
raise ValueError("Database configuration not loaded. Use load_db_config() or set_config_path() first.")
|
19
|
-
# Assuming the SQLite database file path is provided in the config with the key 'database'
|
20
|
-
database_path = self.config['database']
|
21
|
-
connection_string = f"sqlite:///{database_path}"
|
22
|
-
self.engine = create_engine(connection_string, pool_pre_ping=True)
|
23
|
-
|
24
|
-
def get_connection(self):
|
25
|
-
if not self.engine:
|
26
|
-
self.connect()
|
27
|
-
return self.engine.connect()
|
28
|
-
|
29
|
-
def create_MEDfl_db(self, path_to_csv):
|
30
|
-
# Get the directory of the current script
|
31
|
-
current_directory = os.path.dirname(__file__)
|
32
|
-
|
33
|
-
# Define the path to the create_db.py script
|
34
|
-
create_db_script_path = os.path.join(current_directory, '..', 'scripts', 'create_db.py')
|
35
|
-
|
36
|
-
# Execute the create_db.py script
|
37
|
-
subprocess.run(['
|
38
|
-
|
39
|
-
return
|
40
|
-
|
41
|
-
def close(self):
|
42
|
-
if self.engine:
|
43
|
-
self.engine.dispose()
|
1
|
+
import os
|
2
|
+
import subprocess
|
3
|
+
from sqlalchemy import create_engine
|
4
|
+
from configparser import ConfigParser
|
5
|
+
|
6
|
+
class DatabaseManager:
    """Manage the connection to the MEDfl SQLite database."""

    def __init__(self):
        # Local import to avoid a circular dependency between
        # NetManager and LearningManager at module-import time.
        from MEDfl.LearningManager.utils import load_db_config

        db_config = load_db_config()
        # self.config stays None when no configuration could be loaded;
        # connect() will then raise a ValueError.
        self.config = db_config if db_config else None
        self.engine = None

    def connect(self):
        """Create the SQLAlchemy engine from the loaded configuration.

        :raises ValueError: If no database configuration is loaded.
        """
        if not self.config:
            raise ValueError("Database configuration not loaded. Use load_db_config() or set_config_path() first.")
        # The SQLite database file path is provided in the config with
        # the key 'database'.
        database_path = self.config['database']
        connection_string = f"sqlite:///{database_path}"
        # pool_pre_ping revalidates pooled connections before use.
        self.engine = create_engine(connection_string, pool_pre_ping=True)

    def get_connection(self):
        """Return an open connection, lazily creating the engine first."""
        if not self.engine:
            self.connect()
        return self.engine.connect()

    def create_MEDfl_db(self, path_to_csv):
        """Run the bundled create_db.py script to (re)create the database.

        :param path_to_csv: Path to the master CSV file passed to the script.
        """
        import sys

        # Locate scripts/create_db.py relative to this file.
        current_directory = os.path.dirname(__file__)
        create_db_script_path = os.path.join(current_directory, '..', 'scripts', 'create_db.py')

        # Use the running interpreter instead of a hard-coded 'python3'
        # so this also works on Windows and inside virtual environments;
        # check=True surfaces script failures instead of ignoring them.
        subprocess.run([sys.executable, create_db_script_path, path_to_csv], check=True)

        return

    def close(self):
        """Dispose of the engine and its connection pool, if any."""
        if self.engine:
            self.engine.dispose()
|
MEDfl/NetManager/dataset.py
CHANGED
@@ -1,92 +1,92 @@
|
|
1
|
-
import pandas as pd
|
2
|
-
from sqlalchemy import text
|
3
|
-
|
4
|
-
from .net_helper import *
|
5
|
-
from .net_manager_queries import (DELETE_DATASET, INSERT_DATASET,
|
6
|
-
SELECT_ALL_DATASET_NAMES)
|
7
|
-
from MEDfl.NetManager.database_connector import DatabaseManager
|
8
|
-
|
9
|
-
class DataSet:
|
10
|
-
def __init__(self, name: str, path: str, engine=None):
|
11
|
-
"""
|
12
|
-
Initialize a DataSet object.
|
13
|
-
|
14
|
-
:param name: The name of the dataset.
|
15
|
-
:type name: str
|
16
|
-
:param path: The file path of the dataset CSV file.
|
17
|
-
:type path: str
|
18
|
-
"""
|
19
|
-
self.name = name
|
20
|
-
self.path = path
|
21
|
-
db_manager = DatabaseManager()
|
22
|
-
db_manager.connect()
|
23
|
-
self.engine = db_manager.get_connection()
|
24
|
-
|
25
|
-
def validate(self):
|
26
|
-
"""
|
27
|
-
Validate name and path attributes.
|
28
|
-
|
29
|
-
:raises TypeError: If name or path is not a string.
|
30
|
-
"""
|
31
|
-
if not isinstance(self.name, str):
|
32
|
-
raise TypeError("name argument must be a string")
|
33
|
-
|
34
|
-
if not isinstance(self.path, str):
|
35
|
-
raise TypeError("path argument must be a string")
|
36
|
-
|
37
|
-
def upload_dataset(self, NodeId=-1):
|
38
|
-
"""
|
39
|
-
Upload the dataset to the database.
|
40
|
-
|
41
|
-
:param NodeId: The NodeId associated with the dataset.
|
42
|
-
:type NodeId: int
|
43
|
-
|
44
|
-
Notes:
|
45
|
-
- Assumes the file at self.path is a valid CSV file.
|
46
|
-
- The dataset is uploaded to the 'DataSets' table in the database.
|
47
|
-
"""
|
48
|
-
|
49
|
-
data_df = pd.read_csv(self.path)
|
50
|
-
nodeId = NodeId
|
51
|
-
columns = data_df.columns.tolist()
|
52
|
-
|
53
|
-
|
54
|
-
data_df = process_eicu(data_df)
|
55
|
-
for index, row in data_df.iterrows():
|
56
|
-
query_1 = "INSERT INTO DataSets(DataSetName,nodeId," + "".join(
|
57
|
-
f"{x}," for x in columns
|
58
|
-
)
|
59
|
-
query_2 = f" VALUES ('{self.name}',{nodeId}, " + "".join(
|
60
|
-
f"{is_str(data_df, row, x)}," for x in columns
|
61
|
-
)
|
62
|
-
query = query_1[:-1] + ")" + query_2[:-1] + ")"
|
63
|
-
|
64
|
-
self.engine.execute(text(query))
|
65
|
-
|
66
|
-
def delete_dataset(self):
|
67
|
-
"""
|
68
|
-
Delete the dataset from the database.
|
69
|
-
|
70
|
-
Notes:
|
71
|
-
- Assumes the dataset name is unique in the 'DataSets' table.
|
72
|
-
"""
|
73
|
-
self.engine.execute(text(DELETE_DATASET), {"name": self.name})
|
74
|
-
|
75
|
-
def update_data(self):
|
76
|
-
"""
|
77
|
-
Update the data in the dataset.
|
78
|
-
|
79
|
-
Not implemented yet.
|
80
|
-
"""
|
81
|
-
pass
|
82
|
-
|
83
|
-
@staticmethod
|
84
|
-
def list_alldatasets(engine):
|
85
|
-
"""
|
86
|
-
List all dataset names from the 'DataSets' table.
|
87
|
-
|
88
|
-
:returns: A DataFrame containing the names of all datasets in the 'DataSets' table.
|
89
|
-
:rtype: pd.DataFrame
|
90
|
-
"""
|
91
|
-
res = pd.read_sql(text(SELECT_ALL_DATASET_NAMES), engine)
|
92
|
-
return res
|
1
|
+
import pandas as pd
|
2
|
+
from sqlalchemy import text
|
3
|
+
|
4
|
+
from .net_helper import *
|
5
|
+
from .net_manager_queries import (DELETE_DATASET, INSERT_DATASET,
|
6
|
+
SELECT_ALL_DATASET_NAMES)
|
7
|
+
from MEDfl.NetManager.database_connector import DatabaseManager
|
8
|
+
|
9
|
+
class DataSet:
    """A named CSV dataset persisted in the 'DataSets' database table."""

    def __init__(self, name: str, path: str, engine=None):
        """
        Initialize a DataSet object.

        :param name: The name of the dataset.
        :type name: str
        :param path: The file path of the dataset CSV file.
        :type path: str
        """
        self.name = name
        self.path = path
        db_manager = DatabaseManager()
        db_manager.connect()
        self.engine = db_manager.get_connection()

    def validate(self):
        """
        Validate name and path attributes.

        :raises TypeError: If name or path is not a string.
        """
        if not isinstance(self.name, str):
            raise TypeError("name argument must be a string")

        if not isinstance(self.path, str):
            raise TypeError("path argument must be a string")

    def upload_dataset(self, NodeId=-1):
        """
        Upload the dataset to the database.

        :param NodeId: The NodeId associated with the dataset.
        :type NodeId: int

        Notes:
            - Assumes the file at self.path is a valid CSV file.
            - The dataset is uploaded to the 'DataSets' table in the database.
            - Cell values and the dataset name are passed as bound
              parameters, so the driver handles quoting/escaping; this
              fixes the SQL-injection risk of the previous
              string-interpolated query. Column names still come from
              the CSV header and are interpolated into the statement —
              the CSV is assumed to be trusted in that respect.
        """
        data_df = pd.read_csv(self.path)
        # Capture the column list before preprocessing, mirroring the
        # original insertion order.
        columns = data_df.columns.tolist()

        data_df = process_eicu(data_df)

        # Build one parameterized INSERT and reuse it for every row.
        column_sql = ",".join(columns)
        placeholder_sql = ",".join(f":{c}" for c in columns)
        insert_stmt = text(
            f"INSERT INTO DataSets(DataSetName,nodeId,{column_sql})"
            f" VALUES (:DataSetName,:nodeId,{placeholder_sql})"
        )

        for _, row in data_df.iterrows():
            params = {"DataSetName": self.name, "nodeId": NodeId}
            for c in columns:
                params[c] = row[c]
            self.engine.execute(insert_stmt, params)

    def delete_dataset(self):
        """
        Delete the dataset from the database.

        Notes:
            - Assumes the dataset name is unique in the 'DataSets' table.
        """
        self.engine.execute(text(DELETE_DATASET), {"name": self.name})

    def update_data(self):
        """
        Update the data in the dataset.

        Not implemented yet.
        """
        pass

    @staticmethod
    def list_alldatasets(engine):
        """
        List all dataset names from the 'DataSets' table.

        :returns: A DataFrame containing the names of all datasets in the 'DataSets' table.
        :rtype: pd.DataFrame
        """
        res = pd.read_sql(text(SELECT_ALL_DATASET_NAMES), engine)
        return res
|