epyt-flow 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. epyt_flow/EPANET/EPANET/SRC_engines/AUTHORS +28 -0
  2. epyt_flow/EPANET/EPANET/SRC_engines/LICENSE +21 -0
  3. epyt_flow/EPANET/EPANET/SRC_engines/Readme_SRC_Engines.txt +18 -0
  4. epyt_flow/EPANET/EPANET/SRC_engines/enumstxt.h +134 -0
  5. epyt_flow/EPANET/EPANET/SRC_engines/epanet.c +5578 -0
  6. epyt_flow/EPANET/EPANET/SRC_engines/epanet2.c +865 -0
  7. epyt_flow/EPANET/EPANET/SRC_engines/epanet2.def +131 -0
  8. epyt_flow/EPANET/EPANET/SRC_engines/errors.dat +73 -0
  9. epyt_flow/EPANET/EPANET/SRC_engines/funcs.h +193 -0
  10. epyt_flow/EPANET/EPANET/SRC_engines/genmmd.c +1000 -0
  11. epyt_flow/EPANET/EPANET/SRC_engines/hash.c +177 -0
  12. epyt_flow/EPANET/EPANET/SRC_engines/hash.h +28 -0
  13. epyt_flow/EPANET/EPANET/SRC_engines/hydcoeffs.c +1151 -0
  14. epyt_flow/EPANET/EPANET/SRC_engines/hydraul.c +1117 -0
  15. epyt_flow/EPANET/EPANET/SRC_engines/hydsolver.c +720 -0
  16. epyt_flow/EPANET/EPANET/SRC_engines/hydstatus.c +476 -0
  17. epyt_flow/EPANET/EPANET/SRC_engines/include/epanet2.h +431 -0
  18. epyt_flow/EPANET/EPANET/SRC_engines/include/epanet2_2.h +1786 -0
  19. epyt_flow/EPANET/EPANET/SRC_engines/include/epanet2_enums.h +468 -0
  20. epyt_flow/EPANET/EPANET/SRC_engines/inpfile.c +810 -0
  21. epyt_flow/EPANET/EPANET/SRC_engines/input1.c +707 -0
  22. epyt_flow/EPANET/EPANET/SRC_engines/input2.c +864 -0
  23. epyt_flow/EPANET/EPANET/SRC_engines/input3.c +2170 -0
  24. epyt_flow/EPANET/EPANET/SRC_engines/main.c +93 -0
  25. epyt_flow/EPANET/EPANET/SRC_engines/mempool.c +142 -0
  26. epyt_flow/EPANET/EPANET/SRC_engines/mempool.h +24 -0
  27. epyt_flow/EPANET/EPANET/SRC_engines/output.c +852 -0
  28. epyt_flow/EPANET/EPANET/SRC_engines/project.c +1359 -0
  29. epyt_flow/EPANET/EPANET/SRC_engines/quality.c +685 -0
  30. epyt_flow/EPANET/EPANET/SRC_engines/qualreact.c +743 -0
  31. epyt_flow/EPANET/EPANET/SRC_engines/qualroute.c +694 -0
  32. epyt_flow/EPANET/EPANET/SRC_engines/report.c +1489 -0
  33. epyt_flow/EPANET/EPANET/SRC_engines/rules.c +1362 -0
  34. epyt_flow/EPANET/EPANET/SRC_engines/smatrix.c +871 -0
  35. epyt_flow/EPANET/EPANET/SRC_engines/text.h +497 -0
  36. epyt_flow/EPANET/EPANET/SRC_engines/types.h +874 -0
  37. epyt_flow/EPANET/EPANET-MSX/MSX_Updates.txt +53 -0
  38. epyt_flow/EPANET/EPANET-MSX/Src/dispersion.h +27 -0
  39. epyt_flow/EPANET/EPANET-MSX/Src/hash.c +107 -0
  40. epyt_flow/EPANET/EPANET-MSX/Src/hash.h +28 -0
  41. epyt_flow/EPANET/EPANET-MSX/Src/include/epanetmsx.h +102 -0
  42. epyt_flow/EPANET/EPANET-MSX/Src/include/epanetmsx_export.h +42 -0
  43. epyt_flow/EPANET/EPANET-MSX/Src/mathexpr.c +937 -0
  44. epyt_flow/EPANET/EPANET-MSX/Src/mathexpr.h +39 -0
  45. epyt_flow/EPANET/EPANET-MSX/Src/mempool.c +204 -0
  46. epyt_flow/EPANET/EPANET-MSX/Src/mempool.h +24 -0
  47. epyt_flow/EPANET/EPANET-MSX/Src/msxchem.c +1285 -0
  48. epyt_flow/EPANET/EPANET-MSX/Src/msxcompiler.c +368 -0
  49. epyt_flow/EPANET/EPANET-MSX/Src/msxdict.h +42 -0
  50. epyt_flow/EPANET/EPANET-MSX/Src/msxdispersion.c +586 -0
  51. epyt_flow/EPANET/EPANET-MSX/Src/msxerr.c +116 -0
  52. epyt_flow/EPANET/EPANET-MSX/Src/msxfile.c +260 -0
  53. epyt_flow/EPANET/EPANET-MSX/Src/msxfuncs.c +175 -0
  54. epyt_flow/EPANET/EPANET-MSX/Src/msxfuncs.h +35 -0
  55. epyt_flow/EPANET/EPANET-MSX/Src/msxinp.c +1504 -0
  56. epyt_flow/EPANET/EPANET-MSX/Src/msxout.c +401 -0
  57. epyt_flow/EPANET/EPANET-MSX/Src/msxproj.c +791 -0
  58. epyt_flow/EPANET/EPANET-MSX/Src/msxqual.c +2010 -0
  59. epyt_flow/EPANET/EPANET-MSX/Src/msxrpt.c +400 -0
  60. epyt_flow/EPANET/EPANET-MSX/Src/msxtank.c +422 -0
  61. epyt_flow/EPANET/EPANET-MSX/Src/msxtoolkit.c +1164 -0
  62. epyt_flow/EPANET/EPANET-MSX/Src/msxtypes.h +551 -0
  63. epyt_flow/EPANET/EPANET-MSX/Src/msxutils.c +524 -0
  64. epyt_flow/EPANET/EPANET-MSX/Src/msxutils.h +56 -0
  65. epyt_flow/EPANET/EPANET-MSX/Src/newton.c +158 -0
  66. epyt_flow/EPANET/EPANET-MSX/Src/newton.h +34 -0
  67. epyt_flow/EPANET/EPANET-MSX/Src/rk5.c +287 -0
  68. epyt_flow/EPANET/EPANET-MSX/Src/rk5.h +39 -0
  69. epyt_flow/EPANET/EPANET-MSX/Src/ros2.c +293 -0
  70. epyt_flow/EPANET/EPANET-MSX/Src/ros2.h +35 -0
  71. epyt_flow/EPANET/EPANET-MSX/Src/smatrix.c +816 -0
  72. epyt_flow/EPANET/EPANET-MSX/Src/smatrix.h +29 -0
  73. epyt_flow/EPANET/EPANET-MSX/readme.txt +14 -0
  74. epyt_flow/EPANET/compile.sh +4 -0
  75. epyt_flow/VERSION +1 -0
  76. epyt_flow/__init__.py +24 -0
  77. epyt_flow/data/__init__.py +0 -0
  78. epyt_flow/data/benchmarks/__init__.py +11 -0
  79. epyt_flow/data/benchmarks/batadal.py +257 -0
  80. epyt_flow/data/benchmarks/batadal_data.py +28 -0
  81. epyt_flow/data/benchmarks/battledim.py +473 -0
  82. epyt_flow/data/benchmarks/battledim_data.py +51 -0
  83. epyt_flow/data/benchmarks/gecco_water_quality.py +267 -0
  84. epyt_flow/data/benchmarks/leakdb.py +592 -0
  85. epyt_flow/data/benchmarks/leakdb_data.py +18923 -0
  86. epyt_flow/data/benchmarks/water_usage.py +123 -0
  87. epyt_flow/data/networks.py +650 -0
  88. epyt_flow/gym/__init__.py +4 -0
  89. epyt_flow/gym/control_gyms.py +47 -0
  90. epyt_flow/gym/scenario_control_env.py +101 -0
  91. epyt_flow/metrics.py +404 -0
  92. epyt_flow/models/__init__.py +2 -0
  93. epyt_flow/models/event_detector.py +31 -0
  94. epyt_flow/models/sensor_interpolation_detector.py +118 -0
  95. epyt_flow/rest_api/__init__.py +4 -0
  96. epyt_flow/rest_api/base_handler.py +70 -0
  97. epyt_flow/rest_api/res_manager.py +95 -0
  98. epyt_flow/rest_api/scada_data_handler.py +476 -0
  99. epyt_flow/rest_api/scenario_handler.py +352 -0
  100. epyt_flow/rest_api/server.py +106 -0
  101. epyt_flow/serialization.py +438 -0
  102. epyt_flow/simulation/__init__.py +5 -0
  103. epyt_flow/simulation/events/__init__.py +6 -0
  104. epyt_flow/simulation/events/actuator_events.py +259 -0
  105. epyt_flow/simulation/events/event.py +81 -0
  106. epyt_flow/simulation/events/leakages.py +404 -0
  107. epyt_flow/simulation/events/sensor_faults.py +267 -0
  108. epyt_flow/simulation/events/sensor_reading_attack.py +185 -0
  109. epyt_flow/simulation/events/sensor_reading_event.py +170 -0
  110. epyt_flow/simulation/events/system_event.py +88 -0
  111. epyt_flow/simulation/parallel_simulation.py +147 -0
  112. epyt_flow/simulation/scada/__init__.py +3 -0
  113. epyt_flow/simulation/scada/advanced_control.py +134 -0
  114. epyt_flow/simulation/scada/scada_data.py +1589 -0
  115. epyt_flow/simulation/scada/scada_data_export.py +255 -0
  116. epyt_flow/simulation/scenario_config.py +608 -0
  117. epyt_flow/simulation/scenario_simulator.py +1897 -0
  118. epyt_flow/simulation/scenario_visualizer.py +61 -0
  119. epyt_flow/simulation/sensor_config.py +1289 -0
  120. epyt_flow/topology.py +290 -0
  121. epyt_flow/uncertainty/__init__.py +3 -0
  122. epyt_flow/uncertainty/model_uncertainty.py +302 -0
  123. epyt_flow/uncertainty/sensor_noise.py +73 -0
  124. epyt_flow/uncertainty/uncertainties.py +555 -0
  125. epyt_flow/uncertainty/utils.py +206 -0
  126. epyt_flow/utils.py +306 -0
  127. epyt_flow-0.1.0.dist-info/LICENSE +21 -0
  128. epyt_flow-0.1.0.dist-info/METADATA +139 -0
  129. epyt_flow-0.1.0.dist-info/RECORD +131 -0
  130. epyt_flow-0.1.0.dist-info/WHEEL +5 -0
  131. epyt_flow-0.1.0.dist-info/top_level.txt +1 -0
epyt_flow/EPANET/EPANET-MSX/Src/smatrix.h ADDED
@@ -0,0 +1,29 @@
+
+ #ifndef SMATRIX_H
+ #define SMATRIX_H
+
+ #include "msxtypes.h"
+
+ /* ----------- SMATRIX.C ---------------*/
+ int msx_createsparse(void); /* Creates sparse matrix */
+ int allocsparse(void); /* Allocates matrix memory */
+ void msx_freesparse(void); /* Frees matrix memory */
+ int buildlists(int); /* Builds adjacency lists */
+ int paralink(int, int, int); /* Checks for parallel links */
+ void xparalinks(void); /* Removes parallel links */
+ void freelists(void); /* Frees adjacency lists */
+ void countdegree(void); /* Counts links at each node */
+ int reordernodes(void); /* Finds a node re-ordering */
+ int mindegree(int, int); /* Finds min. degree node */
+ int growlist(int); /* Augments adjacency list */
+ int newlink(Padjlist); /* Adds fill-ins for a node */
+ int linked(int, int); /* Checks if 2 nodes linked */
+ int addlink(int, int, int); /* Creates new fill-in */
+ int storesparse(int); /* Stores sparse matrix */
+ int ordersparse(int); /* Orders matrix storage */
+ void transpose(int, int*, int*, /* Transposes sparse matrix */
+ int*, int*, int*, int*, int*);
+ int msx_linsolve(int, double*, double*, /* Solution of linear eqns. */
+ double*); /* via Cholesky factorization */
+
+ #endif
epyt_flow/EPANET/EPANET-MSX/readme.txt ADDED
@@ -0,0 +1,14 @@
+ The EPANET-MSX program is a free software and can be used to model water quality problems involving multiple components.
+ EPANET-MSX will only run correctly with release 2.0.12 or higher of the EPANET2 engine.
+
+ CMake (https://cmake.org/) can be used to build EPANETMSX applications. The project's CMake file (CMakeLists.txt) is located in its
+ root directory and supports builds for Linux, Mac OS and Windows. To build the EPANETMSX library and its command line executable
+ using CMake, first open a console window and navigate to the project's root directory. Then enter the following commands:
+
+ mkdir build
+ cd build
+ cmake ..
+ cmake --build . --config Release
+
+ Note: under Windows, the third command should be cmake .. -A Win32 for a 32-bit build or cmake .. -A x64 for a 64-bit build
+ when Microsoft Visual Studio is the default compiler. 64-bit EPANETMSX application need to work with 64-bit EPANET2 engine.
epyt_flow/EPANET/compile.sh ADDED
@@ -0,0 +1,4 @@
+ #!/bin/bash
+ mkdir -p "../customlibs/"
+ gcc -w -shared -Wl,-soname,libepanet2_2.so -fPIC -o "../customlibs/libepanet2_2.so" EPANET/SRC_engines/*.c -IEPANET/SRC_engines/include -lc -lm -pthread
+ gcc -w -fPIC -shared -Wl,-soname,libepanetmsx2_2_0.so -o "../customlibs/libepanetmsx2_2_0.so" -fopenmp -Depanetmsx_EXPORTS -IEPANET-MSX/Src/include -IEPANET/SRC_engines/include EPANET-MSX/Src/*.c -Wl,-rpath=. "../customlibs/libepanet2_2.so" -lm -lgomp -lpthread
epyt_flow/VERSION ADDED
@@ -0,0 +1 @@
+ 0.1.0
epyt_flow/__init__.py ADDED
@@ -0,0 +1,24 @@
+ import sys
+ import subprocess
+ import os
+ import shutil
+
+
+ with open(os.path.join(os.path.dirname(__file__), 'VERSION'), encoding="utf-8") as f:
+     VERSION = f.read().strip()
+
+
+ if sys.platform.startswith("linux"):
+     path_to_custom_libs = os.path.join(os.path.dirname(__file__), "customlibs")
+     path_to_lib_epanet = os.path.join(path_to_custom_libs, "libepanet2_2.so")
+
+     update = False
+     if os.path.isfile(path_to_lib_epanet):
+         if os.path.getmtime(__file__) > os.path.getmtime(path_to_lib_epanet):
+             update = True
+
+     if not os.path.isfile(path_to_lib_epanet) or update is True:
+         if shutil.which("gcc") is not None:
+             print("Compiling EPANET and EPANET-MSX...")
+             path_to_epanet = os.path.join(os.path.dirname(__file__), "EPANET")
+             subprocess.check_call(f"cd \"{path_to_epanet}\"; bash compile.sh", shell=True)
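The block above makes the package self-compiling on Linux: on import, if gcc is available and the EPANET engine has not been built yet (or __init__.py is newer than the compiled library), compile.sh is run and the shared objects are written to epyt_flow/customlibs/. A minimal sketch for checking the compiled engine after import; the path layout is taken from the code above, the rest is illustrative:

import os
import epyt_flow

# Importing the package triggers the compilation step shown above (Linux + gcc only).
lib_path = os.path.join(os.path.dirname(epyt_flow.__file__), "customlibs", "libepanet2_2.so")
print("epyt-flow version:", epyt_flow.VERSION)
print("EPANET engine present:", os.path.isfile(lib_path))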
epyt_flow/data/__init__.py ADDED
File without changes
epyt_flow/data/benchmarks/__init__.py ADDED
@@ -0,0 +1,11 @@
+ from .battledim import load_scenario as load_battledim_scenario, \
+     load_scada_data as load_battledim_scada_data, load_data as load_battledim_data, \
+     compute_evaluation_score as compute_battledim_evaluation_score
+ from .leakdb import load_scenarios as load_leakdb_scenarios, \
+     load_scada_data as load_leakdb_scada_data, load_data as load_leakdb_data, \
+     compute_evaluation_score as compute_leakdb_evaluation_score
+ from .batadal import load_scenario as load_batadal_scenario, \
+     load_scada_data as load_batadal_scada_data, load_data as load_batadal_data
+ from .gecco_water_quality import load_gecco2017_water_quality_data, \
+     load_gecco2018_water_quality_data, load_gecco2019_water_quality_data
+ from .water_usage import load_water_usage
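These re-exports give the benchmark loaders a flat import path under epyt_flow.data.benchmarks. A minimal usage sketch (the BATADAL loader itself is shown further below; the call assumes network access so the CSV files can be fetched into the default temp folder):

from epyt_flow.data.benchmarks import load_batadal_data

# Alias for epyt_flow.data.benchmarks.batadal.load_data -- returns Pandas data frames by default.
data = load_batadal_data()
print(data["train_1"].shape, data["train_2"].shape, data["test"].shape)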
epyt_flow/data/benchmarks/batadal.py ADDED
@@ -0,0 +1,257 @@
+ """
+ The BATtle of the Attack Detection ALgorithms (*BATADAL*) by Riccardo Taormina, Stefano Galelli,
+ Nils Ole Tippenhauer, Avi Ostfeld, Elad Salomons, Demetrios Eliades is a competition on planning
+ and management of water networks undertaken within the Water Distribution Systems Analysis
+ Symposium. The goal of the battle was to compare the performance of algorithms for the detection
+ of cyber-physical attacks, whose frequency has increased in the last few years along with the
+ adoption of smart water technologies. The design challenge was set for the C-Town network,
+ a real-world, medium-sized water distribution system operated through programmable logic
+ controllers and a supervisory control and data acquisition (SCADA) system. Participants were
+ provided with data sets containing (simulated) SCADA observations, and challenged to design
+ an attack detection algorithm. The effectiveness of all submitted algorithms was evaluated in
+ terms of time-to-detection and classification accuracy. Seven teams participated in the battle
+ and proposed a variety of successful approaches leveraging data analysis, model-based detection
+ mechanisms, and rule checking. Results were presented at the Water Distribution Systems Analysis
+ Symposium (World Environmental and Water Resources Congress) in Sacramento, California on
+ May 21-25, 2017.
+ The `paper <https://doi.org/10.1061/(ASCE)WR.1943-5452.0000969>`_ summarizes the BATADAL
+ problem, proposed algorithms, results, and future research directions.
+
+ See https://www.batadal.net/ for details.
+
+ This module provides functions for loading the original BATADAL data set
+ :func:`~epyt_flow.data.benchmarks.batadal.load_data`, as well as functions for loading the
+ scenarios :func:`~epyt_flow.data.benchmarks.batadal.load_scenario` and pre-generated
+ SCADA data :func:`~epyt_flow.data.benchmarks.batadal.load_scada_data`.
+ """
+ import os
+ from typing import Any
+ from datetime import datetime
+ import pandas as pd
+ import numpy as np
+
+ from .batadal_data import TRAINING_DATA_2_ATTACKS_TIME, TRAINING_DATA_2_START_TIME, \
+     TEST_DATA_ATTACKS_TIME, TEST_DATA_START_TIME
+ from ...utils import get_temp_folder, unpack_zip_archive, to_seconds, download_if_necessary
+ from ...simulation import ScenarioConfig
+
+
+ def __parse_attacks_time(start_time: str, attacks_time):
+     events = []
+     for event in attacks_time.splitlines():
+         # Parse entry
+         items = [i.strip() for i in event.split(",")]
+
+         event_start_time = int((datetime.strptime(items[0], "%d/%m/%Y %H:%M") - start_time)
+                                .total_seconds())
+         event_end_time = int((datetime.strptime(items[1], "%d/%m/%Y %H:%M") - start_time)
+                              .total_seconds())
+
+         events.append((event_start_time, event_end_time))
+
+     return events
+
+
+ def load_data(download_dir: str = None, return_X_y: bool = False,
+               return_ground_truth: bool = False, return_features_desc: bool = False,
+               verbose: bool = True) -> dict:
+     """
+     Loads the original BATADAL competition data.
+
+     Parameters
+     ----------
+     download_dir : `str`, optional
+         Path to the data files -- if None, the temp folder will be used.
+         If the path does not exist, the data files will be downloaded to the given path.
+
+         The default is None.
+     return_X_y : `bool`, optional
+         If True, the data together with the labels is returned as pairs of Numpy arrays.
+         Otherwise, the data is returned as Pandas data frames.
+
+         The default is False.
+     return_ground_truth : `bool`
+         If True and if `return_X_y` is True, the ground truth labels are included in the
+         returned dictionary -- note that the labels provided in the benchmark constitute
+         a partial labeling only.
+
+         The default is False.
+     return_features_desc : `bool`
+         If True and if `return_X_y` is True, feature names (i.e. descriptions) are included
+         in the returned dictionary.
+
+         The default is False.
+     verbose : `bool`, optional
+         If True, a progress bar is shown while downloading files.
+
+         The default is True.
+
+     Returns
+     -------
+     `dict`
+         Dictionary of the loaded benchmark data. The dictionary contains the two training
+         data sets ("train_1" and "train_2"), as well as the test data set ("test").
+         If `return_X_y` is False, each dictionary entry is a Pandas dataframe.
+         Otherwise, it is a tuple of sensor readings and labels (except for the test set) --
+         if `return_ground_truth` is True or `return_features_desc` is True, the corresponding
+         data is appended to the tuple.
+     """
+     download_dir = download_dir if download_dir is not None else get_temp_folder()
+
+     # Download data
+     training_data_1_url = "https://www.batadal.net/data/BATADAL_dataset03.csv"
+     training_data_2_url = "https://www.batadal.net/data/BATADAL_dataset04.csv"
+     test_data_url = "https://www.batadal.net/data/BATADAL_test_dataset.zip"
+
+     training_data_1_path = os.path.join(download_dir, "BATADAL_dataset03.csv")
+     training_data_2_path = os.path.join(download_dir, "BATADAL_dataset04.csv")
+
+     download_if_necessary(training_data_1_path, training_data_1_url, verbose)
+     download_if_necessary(training_data_2_path, training_data_2_url, verbose)
+
+     download_if_necessary(os.path.join(download_dir, "BATADAL_test_dataset.zip"),
+                           test_data_url, verbose)
+     unpack_zip_archive(os.path.join(download_dir, "BATADAL_test_dataset.zip"), download_dir)
+
+     # Load and return data
+     df_train_1 = pd.read_csv(training_data_1_path)
+     df_train_2 = pd.read_csv(training_data_2_path)
+     df_test = pd.read_csv(os.path.join(download_dir, "BATADAL_test_dataset.csv"))
+
+     if return_X_y is True:
+         # Convert data to numpy
+         y_train_1 = df_train_1["ATT_FLAG"].to_numpy().astype(np.int8)
+         del df_train_1["ATT_FLAG"]
+         del df_train_1["DATETIME"]
+         X_train_1 = df_train_1.to_numpy()
+
+         y_train_2 = df_train_2[" ATT_FLAG"].to_numpy()
+         idx = np.argwhere(y_train_2 == -999)
+         y_train_2[idx] = 0
+         y_train_2 = y_train_2.astype(np.int8)
+         del df_train_2[" ATT_FLAG"]
+         del df_train_2["DATETIME"]
+         X_train_2 = df_train_2.to_numpy()
+
+         del df_test["DATETIME"]
+         X_test = df_test.to_numpy()
+
+         # Create ground truth labels
+         hydraulic_time_step = to_seconds(minutes=15)
+         training_data_2_events_time = __parse_attacks_time(TRAINING_DATA_2_START_TIME,
+                                                            TRAINING_DATA_2_ATTACKS_TIME)
+         test_data_events_time = __parse_attacks_time(TEST_DATA_START_TIME, TEST_DATA_ATTACKS_TIME)
+
+         y_train_2_truth = np.zeros(X_train_2.shape[0])
+         for event_start, event_end in training_data_2_events_time:
+             t0 = int(event_start / hydraulic_time_step)
+             t1 = int(event_end / hydraulic_time_step)
+             y_train_2_truth[t0:t1] = 1
+
+         y_test_truth = np.zeros(X_test.shape[0])
+         for event_start, event_end in test_data_events_time:
+             t0 = int(event_start / hydraulic_time_step)
+             t1 = int(event_end / hydraulic_time_step)
+             y_test_truth[t0:t1] = 1
+
+         # Create features' descriptions
+         features_desc = list(df_train_1.columns)
+         desc_mapping = {"PU": "Pump", "V": "Valve", "T": "Tank", "L": "Level", "S": "State",
+                         "P": "Pressure", "F": "Flow"}
+         for i, f_desc in enumerate(features_desc):
+             pump = False
+             for k, value in desc_mapping.items():
+                 if k in f_desc:
+                     if k == "P" and pump is True:
+                         continue
+                     f_desc = f_desc.replace(k, value)
+                     if k == "PU":
+                         pump = True
+             features_desc[i] = f_desc
+
+         # Create final results
+         r = {"train_1": (X_train_1, y_train_1), "train_2": (X_train_2, y_train_2),
+              "test": X_test}
+
+         if return_ground_truth is True:
+             r["train_1"] = (r["train_1"][0], r["train_1"][1], y_train_1)
+             r["train_2"] = (r["train_2"][0], r["train_2"][1], y_train_2_truth)
+             r["test"] = (r["test"][0], y_test_truth)
+
+         if return_features_desc is True:
+             r["features_desc"] = features_desc
+
+         return r
+     else:
+         return {"train_1": df_train_1, "train_2": df_train_2, "test": df_test}
+
+
+ def load_scada_data(download_dir: str = None, return_X_y: bool = False,
+                     return_ground_truth: bool = False, return_features_desc: bool = False,
+                     verbose: bool = True) -> Any:
+     """
+     Loads the SCADA data of the simulated BATADAL benchmark scenario -- note that due to
+     randomness and undocumented aspects of the original BATADAL data set, these differ from
+     the original data set which can be loaded by calling
+     :func:`~epyt_flow.data.benchmarks.batadal.load_data`.
+
+     Parameters
+     ----------
+     download_dir : `str`, optional
+         Path to the data files -- if None, the temp folder will be used.
+         If the path does not exist, the data files will be downloaded to the given path.
+
+         The default is None.
+     return_X_y : `bool`, optional
+         If True, the data together with the labels is returned as pairs of Numpy arrays.
+         Otherwise, the data is returned as Pandas data frames.
+
+         The default is False.
+     return_ground_truth : `bool`
+         If True and if `return_X_y` is True, the ground truth labels are included in the
+         returned dictionary -- note that the labels provided in the benchmark constitute
+         a partial labeling only.
+
+         The default is False.
+     return_features_desc : `bool`
+         If True and if `return_X_y` is True, feature names (i.e. descriptions) are included
+         in the returned dictionary.
+
+         The default is False.
+     verbose : `bool`, optional
+         If True, a progress bar is shown while downloading files.
+
+         The default is True.
+     """
+     raise NotImplementedError()
+
+
+ def load_scenario(download_dir: str = None, verbose: bool = True) -> ScenarioConfig:
+     """
+     Creates and returns the BATADAL scenario -- it can be either modified or directly passed
+     to the simulator :class:`~epyt_flow.simulation.scenario_simulator.ScenarioSimulator`.
+
+     .. note::
+
+         Note that due to randomness and undocumented aspects of the original BATADAL benchmark,
+         the scenario simulation results differ from the original data set which can be loaded by
+         calling :func:`~epyt_flow.data.benchmarks.batadal.load_data`.
+
+     Parameters
+     ----------
+     download_dir : `str`, optional
+         Path to the data files -- if None, the temp folder will be used.
+         If the path does not exist, the data files will be downloaded to the given path.
+
+         The default is None.
+     verbose : `bool`, optional
+         If True, a progress bar is shown while downloading files.
+
+         The default is True.
+
+     Returns
+     -------
+     :class:`~epyt_flow.simulation.scenario_config.ScenarioConfig`
+         The BATADAL scenario.
+     """
+     raise NotImplementedError()
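A usage sketch for the loader above, following the return structure documented in its docstring (network access and the default temp download folder are assumed):

from epyt_flow.data.benchmarks.batadal import load_data

# Default: a dict of Pandas data frames.
data = load_data()
df_train_1, df_train_2, df_test = data["train_1"], data["train_2"], data["test"]

# Numpy arrays plus (reconstructed) ground-truth labels and feature descriptions.
data = load_data(return_X_y=True, return_ground_truth=True, return_features_desc=True)
X_train_1, y_train_1, _ = data["train_1"]
X_train_2, y_train_2, y_train_2_truth = data["train_2"]
X_test, y_test_truth = data["test"]
feature_names = data["features_desc"]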
epyt_flow/data/benchmarks/batadal_data.py ADDED
@@ -0,0 +1,28 @@
+ """
+ Module provides the event configurations for BATADAL.
+ """
+ from datetime import datetime
+
+
+ TRAINING_DATA_2_START_TIME = datetime.strptime("04/07/2016 00:00", "%d/%m/%Y %H:%M")
+
+ TRAINING_DATA_2_ATTACKS_TIME = \
+     """13/09/2016 23:00, 16/09/2016 00:00
+ 26/09/2016 11:00, 27/09/2016 10:00
+ 09/10/2016 09:00, 11/10/2016 20:00
+ 29/10/2016 19:00, 02/11/2016 16:00
+ 26/11/2016 17:00, 29/11/2016 04:00
+ 06/12/2016 07:00, 10/12/2016 04:00
+ 14/12/2016 15:00, 19/12/2016 04:00"""
+
+
+ TEST_DATA_START_TIME = datetime.strptime("04/01/2017 00:00", "%d/%m/%Y %H:%M")
+
+ TEST_DATA_ATTACKS_TIME = \
+     """16/01/2017 09:00, 19/01/2017 06:00
+ 30/01/2017 08:00, 02/02/2017 00:00
+ 09/02/2017 03:00, 10/02/2017 09:00
+ 12/02/2017 01:00, 13/02/2017 07:00
+ 24/02/2017 05:00, 28/02/2017 08:00
+ 10/03/2017 14:00, 13/03/2017 21:00
+ 25/03/2017 20:00, 27/03/2017 01:00"""
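The attack windows above are plain "start, end" timestamp pairs; __parse_attacks_time in batadal.py turns them into second offsets relative to the corresponding start time. A standalone sketch of the same conversion for the first window of the second training set:

from datetime import datetime

start_time = datetime.strptime("04/07/2016 00:00", "%d/%m/%Y %H:%M")
event = "13/09/2016 23:00, 16/09/2016 00:00"

# Mirror __parse_attacks_time: split the window and convert both ends to offsets in seconds.
items = [i.strip() for i in event.split(",")]
event_start = int((datetime.strptime(items[0], "%d/%m/%Y %H:%M") - start_time).total_seconds())
event_end = int((datetime.strptime(items[1], "%d/%m/%Y %H:%M") - start_time).total_seconds())
print(event_start, event_end)  # offsets in seconds from the start of the data set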