MEDfl 0.1.26__py3-none-any.whl → 0.1.27__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. MEDfl/LearningManager/__init__.py +13 -0
  2. MEDfl/LearningManager/client.py +150 -0
  3. MEDfl/LearningManager/dynamicModal.py +287 -0
  4. MEDfl/LearningManager/federated_dataset.py +60 -0
  5. MEDfl/LearningManager/flpipeline.py +192 -0
  6. MEDfl/LearningManager/model.py +223 -0
  7. MEDfl/LearningManager/params.yaml +14 -0
  8. MEDfl/LearningManager/params_optimiser.py +442 -0
  9. MEDfl/LearningManager/plot.py +229 -0
  10. MEDfl/LearningManager/server.py +181 -0
  11. MEDfl/LearningManager/strategy.py +82 -0
  12. MEDfl/LearningManager/utils.py +308 -0
  13. MEDfl/NetManager/__init__.py +9 -0
  14. MEDfl/NetManager/database_connector.py +48 -0
  15. MEDfl/NetManager/dataset.py +92 -0
  16. MEDfl/NetManager/flsetup.py +320 -0
  17. MEDfl/NetManager/net_helper.py +248 -0
  18. MEDfl/NetManager/net_manager_queries.py +137 -0
  19. MEDfl/NetManager/network.py +174 -0
  20. MEDfl/NetManager/node.py +178 -0
  21. MEDfl/__init__.py +2 -0
  22. {MEDfl-0.1.26.data → MEDfl-0.1.27.data}/scripts/setup_mysql.sh +22 -22
  23. {MEDfl-0.1.26.dist-info → MEDfl-0.1.27.dist-info}/METADATA +127 -127
  24. MEDfl-0.1.27.dist-info/RECORD +53 -0
  25. {MEDfl-0.1.26.dist-info → MEDfl-0.1.27.dist-info}/WHEEL +1 -1
  26. Medfl/LearningManager/__init__.py +13 -13
  27. Medfl/LearningManager/client.py +150 -150
  28. Medfl/LearningManager/dynamicModal.py +287 -287
  29. Medfl/LearningManager/federated_dataset.py +60 -60
  30. Medfl/LearningManager/flpipeline.py +192 -192
  31. Medfl/LearningManager/model.py +223 -223
  32. Medfl/LearningManager/params.yaml +14 -14
  33. Medfl/LearningManager/params_optimiser.py +442 -442
  34. Medfl/LearningManager/plot.py +229 -229
  35. Medfl/LearningManager/server.py +181 -181
  36. Medfl/LearningManager/strategy.py +82 -82
  37. Medfl/LearningManager/utils.py +308 -308
  38. Medfl/NetManager/__init__.py +9 -9
  39. Medfl/NetManager/database_connector.py +48 -48
  40. Medfl/NetManager/dataset.py +92 -92
  41. Medfl/NetManager/flsetup.py +320 -320
  42. Medfl/NetManager/net_helper.py +248 -248
  43. Medfl/NetManager/net_manager_queries.py +137 -137
  44. Medfl/NetManager/network.py +174 -174
  45. Medfl/NetManager/node.py +178 -178
  46. Medfl/__init__.py +1 -1
  47. alembic/env.py +61 -61
  48. scripts/base.py +29 -29
  49. scripts/config.ini +5 -5
  50. scripts/create_db.py +133 -133
  51. MEDfl-0.1.26.dist-info/RECORD +0 -32
  52. {MEDfl-0.1.26.dist-info → MEDfl-0.1.27.dist-info}/top_level.txt +0 -0
scripts/create_db.py CHANGED
@@ -1,133 +1,133 @@
1
- import sys
2
- import mysql.connector
3
- import pandas as pd
4
- from mysql.connector import Error
5
-
6
- from configparser import ConfigParser
7
- import os
8
-
9
- def main(csv_file_path):
10
- try:
11
- # Get the directory of the current script
12
- current_directory = os.path.dirname(os.path.abspath(__file__))
13
-
14
- # Load configuration from the config file
15
- config_file_path = os.path.join(current_directory, 'db_config.ini')
16
-
17
- config = ConfigParser()
18
- config.read(config_file_path)
19
- mysql_config = config['mysql']
20
-
21
- print('Im here !')
22
-
23
- mydb = mysql.connector.connect(host=mysql_config['host'], user=mysql_config['user'], password=mysql_config['password'])
24
- mycursor = mydb.cursor()
25
-
26
- # Create the 'MEDfl' database if it doesn't exist
27
- mycursor.execute("CREATE DATABASE IF NOT EXISTS MEDfl")
28
-
29
- # Select the 'MEDfl' database
30
- mycursor.execute("USE MEDfl")
31
-
32
- # Get the list of all tables in the database
33
- mycursor.execute("SHOW TABLES")
34
- tables = mycursor.fetchall()
35
-
36
- # Drop each table one by one
37
- for table in tables:
38
- table_name = table[0]
39
- mycursor.execute(f"DROP TABLE IF EXISTS {table_name}")
40
-
41
- # Create Networks table
42
- mycursor.execute(
43
- "CREATE TABLE Networks( \
44
- NetId INT NOT NULL AUTO_INCREMENT, \
45
- NetName VARCHAR(255), \
46
- PRIMARY KEY (NetId) \
47
- );"
48
- )
49
-
50
- # Create FLsetup table
51
- mycursor.execute("CREATE TABLE FLsetup (\
52
- FLsetupId int NOT NULL AUTO_INCREMENT,\
53
- name varchar(255) NOT NULL, \
54
- description varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL,\
55
- creation_date datetime NOT NULL,\
56
- NetId int NOT NULL,\
57
- column_name varchar(255) DEFAULT NULL,\
58
- PRIMARY KEY (`FLsetupId`) \
59
- )")
60
-
61
- # Create Nodes table
62
- mycursor.execute("CREATE TABLE Nodes ( \
63
- NodeId int NOT NULL AUTO_INCREMENT,\
64
- NodeName varchar(255) DEFAULT NULL,\
65
- train tinyint(1) DEFAULT '1',\
66
- NetId int DEFAULT NULL,\
67
- PRIMARY KEY (NodeId)\
68
- )")
69
-
70
- data_df = pd.read_csv(csv_file_path)
71
- columns = data_df.columns.tolist()
72
- column_map = {"object": "VARCHAR(255)", "int64": "INT", "float64": "FLOAT"}
73
- sub_query = "".join(f"{col} {column_map[str(data_df[col].dtype)]}," for col in columns)
74
-
75
- # Create Datasets table by getting columns from the master csv file
76
- mycursor.execute(
77
- f"CREATE TABLE DataSets( \
78
- DataSetId INT NOT NULL AUTO_INCREMENT, \
79
- DataSetName VARCHAR(255), \
80
- NodeId INT CHECK (NodeId = -1 OR NodeId IS NOT NULL),\
81
- {sub_query}\
82
- PRIMARY KEY (DataSetId)\
83
- )"
84
- )
85
-
86
- # Create FLpipeline table
87
- mycursor.execute("CREATE TABLE FLpipeline(\
88
- id int NOT NULL AUTO_INCREMENT,\
89
- name varchar(255) NOT NULL, \
90
- description varchar(255) NOT NULL,\
91
- creation_date datetime NOT NULL,\
92
- results longtext NOT NULL,\
93
- PRIMARY KEY (id)\
94
- ) ")
95
-
96
- # Create test results table
97
- mycursor.execute("CREATE TABLE testResults(\
98
- pipelineId INT,\
99
- nodename VARCHAR(100) NOT NULL, \
100
- confusionmatrix VARCHAR(255),\
101
- accuracy LONG,\
102
- sensivity LONG,\
103
- ppv LONG,\
104
- npv LONG,\
105
- f1score LONG,\
106
- fpr LONG,\
107
- tpr LONG, \
108
- PRIMARY KEY (pipelineId , nodename)\
109
- ) ")
110
-
111
- # Create FederatedDataset table
112
- mycursor.execute("CREATE TABLE FedDatasets (\
113
- FedId int NOT NULL AUTO_INCREMENT,\
114
- FLsetupId int DEFAULT NULL,\
115
- FLpipeId int DEFAULT NULL,\
116
- name varchar(255) NOT NULL,\
117
- PRIMARY KEY (FedId)\
118
- )")
119
-
120
- # Commit and close the cursor
121
- mydb.commit()
122
- mycursor.close()
123
- mydb.close()
124
-
125
- except Error as e:
126
- print(f"Error: {e}")
127
-
128
- if __name__ == "__main__":
129
- if len(sys.argv) != 2:
130
- print("Usage: python script.py <path_to_csv_file>")
131
- sys.exit(1)
132
- csv_file_path = sys.argv[1]
133
- main(csv_file_path)
1
+ import sys
2
+ import mysql.connector
3
+ import pandas as pd
4
+ from mysql.connector import Error
5
+
6
+ from configparser import ConfigParser
7
+ import os
8
+
9
def dataset_column_ddl(data_df):
    """Return the column-definition fragment for the DataSets table.

    Maps pandas dtypes to MySQL column types. Any dtype not covered by
    the map (e.g. bool, datetime64) falls back to VARCHAR(255) instead
    of raising KeyError. Column names are backtick-quoted so CSV headers
    that collide with SQL keywords or contain spaces still yield valid DDL.

    Args:
        data_df: DataFrame whose columns/dtypes define the extra columns.

    Returns:
        A string of comma-terminated ``\`name\` TYPE,`` fragments.
    """
    column_map = {"object": "VARCHAR(255)", "int64": "INT", "float64": "FLOAT"}
    return "".join(
        f"`{col}` {column_map.get(str(data_df[col].dtype), 'VARCHAR(255)')},"
        for col in data_df.columns
    )


def main(csv_file_path):
    """(Re)create the MEDfl MySQL schema from scratch.

    Reads connection settings from ``db_config.ini`` next to this script,
    creates/selects the ``MEDfl`` database, drops every existing table,
    then recreates Networks, FLsetup, Nodes, DataSets, FLpipeline,
    testResults and FedDatasets. DataSets gains one column per column of
    the CSV at *csv_file_path*.

    Args:
        csv_file_path: Path to the master CSV whose header defines the
            extra DataSets columns.

    MySQL errors are caught and printed; the connection and cursor are
    always released, even on failure.
    """
    mydb = None
    mycursor = None
    try:
        # Load DB credentials from db_config.ini located beside this script.
        current_directory = os.path.dirname(os.path.abspath(__file__))
        config_file_path = os.path.join(current_directory, 'db_config.ini')

        config = ConfigParser()
        config.read(config_file_path)
        mysql_config = config['mysql']

        mydb = mysql.connector.connect(
            host=mysql_config['host'],
            user=mysql_config['user'],
            password=mysql_config['password'],
        )
        mycursor = mydb.cursor()

        # Create the 'MEDfl' database if it doesn't exist, then select it.
        mycursor.execute("CREATE DATABASE IF NOT EXISTS MEDfl")
        mycursor.execute("USE MEDfl")

        # Drop every existing table so the schema is rebuilt from scratch.
        mycursor.execute("SHOW TABLES")
        for (table_name,) in mycursor.fetchall():
            # Identifiers cannot be bound as parameters; backtick-quote
            # them so unusual table names cannot break the statement.
            mycursor.execute(f"DROP TABLE IF EXISTS `{table_name}`")

        # Create Networks table
        mycursor.execute(
            "CREATE TABLE Networks ("
            " NetId INT NOT NULL AUTO_INCREMENT,"
            " NetName VARCHAR(255),"
            " PRIMARY KEY (NetId)"
            ")"
        )

        # Create FLsetup table
        mycursor.execute(
            "CREATE TABLE FLsetup ("
            " FLsetupId int NOT NULL AUTO_INCREMENT,"
            " name varchar(255) NOT NULL,"
            " description varchar(255) CHARACTER SET utf8mb4"
            "   COLLATE utf8mb4_0900_ai_ci NOT NULL,"
            " creation_date datetime NOT NULL,"
            " NetId int NOT NULL,"
            " column_name varchar(255) DEFAULT NULL,"
            " PRIMARY KEY (`FLsetupId`)"
            ")"
        )

        # Create Nodes table
        mycursor.execute(
            "CREATE TABLE Nodes ("
            " NodeId int NOT NULL AUTO_INCREMENT,"
            " NodeName varchar(255) DEFAULT NULL,"
            " train tinyint(1) DEFAULT '1',"
            " NetId int DEFAULT NULL,"
            " PRIMARY KEY (NodeId)"
            ")"
        )

        # DataSets columns mirror the master CSV's header; unknown dtypes
        # fall back to VARCHAR(255) (previously a hard KeyError).
        data_df = pd.read_csv(csv_file_path)
        sub_query = dataset_column_ddl(data_df)

        # Create DataSets table by getting columns from the master csv file
        mycursor.execute(
            f"CREATE TABLE DataSets ("
            f" DataSetId INT NOT NULL AUTO_INCREMENT,"
            f" DataSetName VARCHAR(255),"
            f" NodeId INT CHECK (NodeId = -1 OR NodeId IS NOT NULL),"
            f" {sub_query}"
            f" PRIMARY KEY (DataSetId)"
            f")"
        )

        # Create FLpipeline table
        mycursor.execute(
            "CREATE TABLE FLpipeline ("
            " id int NOT NULL AUTO_INCREMENT,"
            " name varchar(255) NOT NULL,"
            " description varchar(255) NOT NULL,"
            " creation_date datetime NOT NULL,"
            " results longtext NOT NULL,"
            " PRIMARY KEY (id)"
            ")"
        )

        # Create test results table
        mycursor.execute(
            "CREATE TABLE testResults ("
            " pipelineId INT,"
            " nodename VARCHAR(100) NOT NULL,"
            " confusionmatrix VARCHAR(255),"
            " accuracy LONG,"
            " sensivity LONG,"
            " ppv LONG,"
            " npv LONG,"
            " f1score LONG,"
            " fpr LONG,"
            " tpr LONG,"
            " PRIMARY KEY (pipelineId, nodename)"
            ")"
        )

        # Create FederatedDataset table
        mycursor.execute(
            "CREATE TABLE FedDatasets ("
            " FedId int NOT NULL AUTO_INCREMENT,"
            " FLsetupId int DEFAULT NULL,"
            " FLpipeId int DEFAULT NULL,"
            " name varchar(255) NOT NULL,"
            " PRIMARY KEY (FedId)"
            ")"
        )

        mydb.commit()

    except Error as e:
        print(f"Error: {e}")
    finally:
        # Always release the cursor/connection, even when setup fails
        # (previously they leaked on any error).
        if mycursor is not None:
            mycursor.close()
        if mydb is not None:
            mydb.close()
127
+
128
if __name__ == "__main__":
    # This script takes exactly one argument: the path to the master CSV.
    args = sys.argv
    if len(args) == 2:
        main(args[1])
    else:
        print("Usage: python script.py <path_to_csv_file>")
        sys.exit(1)
@@ -1,32 +0,0 @@
1
- MEDfl-0.1.26.data/scripts/setup_mysql.sh,sha256=PXl271yvYBrXwrZ7P0tsYHnGkOubKvRaFee4MnzsSko,560
2
- Medfl/__init__.py,sha256=wamSaVIS4RoIHcTCLQPKQ5f8odcy87mRCKyRR7-Qpu4,57
3
- Medfl/LearningManager/__init__.py,sha256=mvlAmHEHljXGaB6Ij0EPN0Txw21qX25ELK3X7QkoVwA,358
4
- Medfl/LearningManager/client.py,sha256=9WyLYCsI9JuHjneLbbzDf7HtzjYINuLfqwkbxOsrBrE,6083
5
- Medfl/LearningManager/dynamicModal.py,sha256=0mTvDJlss0uSJ3_EXOuL_d-zRmFyXaKB4W4ql-uEX8Y,10821
6
- Medfl/LearningManager/federated_dataset.py,sha256=fQqIbhO6LSk16Ob9z6RohaZ8X71Ff-yueynjulrl4M0,2141
7
- Medfl/LearningManager/flpipeline.py,sha256=M4-OL4nlogv08J_YsyDsGHXR6xe8BWx4HIsuL1QyUvY,7303
8
- Medfl/LearningManager/model.py,sha256=DA7HP34Eq1Ra65OlkBmjH9d2MD7OEbsOhfxD48l4QOk,7679
9
- Medfl/LearningManager/params.yaml,sha256=5I-NljhnSaqzjkWVNzrOtrB8z7tnHBKiBmY-mKGhBQM,450
10
- Medfl/LearningManager/params_optimiser.py,sha256=pjhDskhSPuca-jnarYoJcFVBvRkdD9tD3992q_eMPSE,18060
11
- Medfl/LearningManager/plot.py,sha256=iPqMV9rVd7hquoFixDL20OzXI5bMpBW41bkVmTKIWtE,7927
12
- Medfl/LearningManager/server.py,sha256=7edxPkZ9Ju3Mep_BSHQpUNgW9HKfCui3_l996buJVlU,7258
13
- Medfl/LearningManager/strategy.py,sha256=n0asQajkHfGLDX3QbbV5qntQA-xuJZU8Z92XccZENsA,3517
14
- Medfl/LearningManager/utils.py,sha256=gAFkA4cUimMaUh40lvveL8b7NvB8zPjWonwZKVk8HpE,9342
15
- Medfl/NetManager/__init__.py,sha256=RhO9Ya6wXOdM6qO58wjTD-lNL7-q8KvPDvSccYP9wUY,246
16
- Medfl/NetManager/database_connector.py,sha256=zZYOYD1ZGpdOxiH_HuMFnofjJimOcnoZ02fdD0Rkh9E,1538
17
- Medfl/NetManager/dataset.py,sha256=eEuVzCp5dGD4tvDVKq6jlSReecge7T20ByG4d7_cnXU,2869
18
- Medfl/NetManager/flsetup.py,sha256=CS7531I08eLm6txMIDWFMCIrPP-dNpOLBTaR2BR6X0c,11754
19
- Medfl/NetManager/net_helper.py,sha256=leXwpkDewj-_bXZUO3S_DscELnyZogb6jmz6Bjrsmag,6860
20
- Medfl/NetManager/net_manager_queries.py,sha256=2dfhba0iuh40kVoC7am-vC4Hlrvr-zfJ5ESymsI1Kps,4327
21
- Medfl/NetManager/network.py,sha256=NPHaSSLLA9FIBkqg3Il8g7VDQ1Ds8rH4d91srT6wUNI,5730
22
- Medfl/NetManager/node.py,sha256=HQPKy-RHs5SkVxPo5EdjD-W9XK--TaFXa49ooua5_kU,6344
23
- alembic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
- alembic/env.py,sha256=a4zJAzPNLHnIrUlXCqf_8vuAlFu0pceFJJKM1PQaOI4,1649
25
- scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
26
- scripts/base.py,sha256=pR7StIt3PpX30aoh53gMkpeNJMHytAPhdc7N09tCITA,781
27
- scripts/config.ini,sha256=tDQQlpwZbCFQuSS017yjEiLglLihp6wETbWtNrSWeAA,82
28
- scripts/create_db.py,sha256=iWE1z33rU_KbIeqoVzdWLhDRLk00TcRf2iYuqpDzOjw,4494
29
- MEDfl-0.1.26.dist-info/METADATA,sha256=cX617jlzpKbpwieidkfDyDT4Vi4sK1NMj87BFCc1qZ8,5580
30
- MEDfl-0.1.26.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
31
- MEDfl-0.1.26.dist-info/top_level.txt,sha256=hMAmSbfVxxQBmDx0uaQeXYlSrdC42iD58FyzJGl2lAs,22
32
- MEDfl-0.1.26.dist-info/RECORD,,