tsadmetrics 0.1.17__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {docs_manual → docs/api_doc}/conf.py +3 -26
- docs/{conf.py → full_doc/conf.py} +1 -1
- {docs_api → docs/manual_doc}/conf.py +3 -26
- examples/example_direct_data.py +28 -0
- examples/example_direct_single_data.py +25 -0
- examples/example_file_reference.py +24 -0
- examples/example_global_config_file.py +13 -0
- examples/example_metric_config_file.py +19 -0
- examples/example_simple_metric.py +8 -0
- examples/specific_examples/AbsoluteDetectionDistance_example.py +24 -0
- examples/specific_examples/AffiliationbasedFScore_example.py +24 -0
- examples/specific_examples/AverageDetectionCount_example.py +24 -0
- examples/specific_examples/CompositeFScore_example.py +24 -0
- examples/specific_examples/DelayThresholdedPointadjustedFScore_example.py +24 -0
- examples/specific_examples/DetectionAccuracyInRange_example.py +24 -0
- examples/specific_examples/EnhancedTimeseriesAwareFScore_example.py +24 -0
- examples/specific_examples/LatencySparsityawareFScore_example.py +24 -0
- examples/specific_examples/MeanTimeToDetect_example.py +24 -0
- examples/specific_examples/NabScore_example.py +24 -0
- examples/specific_examples/PateFScore_example.py +24 -0
- examples/specific_examples/Pate_example.py +24 -0
- examples/specific_examples/PointadjustedAtKFScore_example.py +24 -0
- examples/specific_examples/PointadjustedAucPr_example.py +24 -0
- examples/specific_examples/PointadjustedAucRoc_example.py +24 -0
- examples/specific_examples/PointadjustedFScore_example.py +24 -0
- examples/specific_examples/RangebasedFScore_example.py +24 -0
- examples/specific_examples/SegmentwiseFScore_example.py +24 -0
- examples/specific_examples/TemporalDistance_example.py +24 -0
- examples/specific_examples/TimeTolerantFScore_example.py +24 -0
- examples/specific_examples/TimeseriesAwareFScore_example.py +24 -0
- examples/specific_examples/TotalDetectedInRange_example.py +24 -0
- examples/specific_examples/VusPr_example.py +24 -0
- examples/specific_examples/VusRoc_example.py +24 -0
- examples/specific_examples/WeightedDetectionDifference_example.py +24 -0
- tests/test_dpm.py +212 -0
- tests/test_ptdm.py +366 -0
- tests/test_registry.py +58 -0
- tests/test_runner.py +185 -0
- tests/test_spm.py +213 -0
- tests/test_tmem.py +198 -0
- tests/test_tpdm.py +369 -0
- tests/test_tstm.py +338 -0
- tsadmetrics/__init__.py +0 -21
- tsadmetrics/base/Metric.py +188 -0
- tsadmetrics/evaluation/Report.py +25 -0
- tsadmetrics/evaluation/Runner.py +253 -0
- tsadmetrics/metrics/Registry.py +141 -0
- tsadmetrics/metrics/__init__.py +2 -0
- tsadmetrics/metrics/spm/PointwiseAucPr.py +62 -0
- tsadmetrics/metrics/spm/PointwiseAucRoc.py +63 -0
- tsadmetrics/metrics/spm/PointwiseFScore.py +86 -0
- tsadmetrics/metrics/spm/PrecisionAtK.py +81 -0
- tsadmetrics/metrics/spm/__init__.py +9 -0
- tsadmetrics/metrics/tem/dpm/DelayThresholdedPointadjustedFScore.py +83 -0
- tsadmetrics/metrics/tem/dpm/LatencySparsityawareFScore.py +76 -0
- tsadmetrics/metrics/tem/dpm/MeanTimeToDetect.py +47 -0
- tsadmetrics/metrics/tem/dpm/NabScore.py +60 -0
- tsadmetrics/metrics/tem/dpm/__init__.py +11 -0
- tsadmetrics/metrics/tem/ptdm/AverageDetectionCount.py +53 -0
- tsadmetrics/metrics/tem/ptdm/DetectionAccuracyInRange.py +66 -0
- tsadmetrics/metrics/tem/ptdm/PointadjustedAtKFScore.py +80 -0
- tsadmetrics/metrics/tem/ptdm/TimeseriesAwareFScore.py +248 -0
- tsadmetrics/metrics/tem/ptdm/TotalDetectedInRange.py +65 -0
- tsadmetrics/metrics/tem/ptdm/WeightedDetectionDifference.py +97 -0
- tsadmetrics/metrics/tem/ptdm/__init__.py +12 -0
- tsadmetrics/metrics/tem/tmem/AbsoluteDetectionDistance.py +48 -0
- tsadmetrics/metrics/tem/tmem/EnhancedTimeseriesAwareFScore.py +252 -0
- tsadmetrics/metrics/tem/tmem/TemporalDistance.py +68 -0
- tsadmetrics/metrics/tem/tmem/__init__.py +9 -0
- tsadmetrics/metrics/tem/tpdm/CompositeFScore.py +104 -0
- tsadmetrics/metrics/tem/tpdm/PointadjustedAucPr.py +123 -0
- tsadmetrics/metrics/tem/tpdm/PointadjustedAucRoc.py +119 -0
- tsadmetrics/metrics/tem/tpdm/PointadjustedFScore.py +96 -0
- tsadmetrics/metrics/tem/tpdm/RangebasedFScore.py +236 -0
- tsadmetrics/metrics/tem/tpdm/SegmentwiseFScore.py +73 -0
- tsadmetrics/metrics/tem/tpdm/__init__.py +12 -0
- tsadmetrics/metrics/tem/tstm/AffiliationbasedFScore.py +68 -0
- tsadmetrics/metrics/tem/tstm/Pate.py +62 -0
- tsadmetrics/metrics/tem/tstm/PateFScore.py +61 -0
- tsadmetrics/metrics/tem/tstm/TimeTolerantFScore.py +85 -0
- tsadmetrics/metrics/tem/tstm/VusPr.py +51 -0
- tsadmetrics/metrics/tem/tstm/VusRoc.py +55 -0
- tsadmetrics/metrics/tem/tstm/__init__.py +15 -0
- tsadmetrics/{_tsadeval/affiliation/_integral_interval.py → utils/functions_affiliation.py} +377 -9
- tsadmetrics/utils/functions_auc.py +393 -0
- tsadmetrics/utils/functions_conversion.py +63 -0
- tsadmetrics/utils/functions_counting_metrics.py +26 -0
- tsadmetrics/{_tsadeval/latency_sparsity_aware.py → utils/functions_latency_sparsity_aware.py} +1 -1
- tsadmetrics/{_tsadeval/nabscore.py → utils/functions_nabscore.py} +15 -1
- tsadmetrics-1.0.0.dist-info/METADATA +69 -0
- tsadmetrics-1.0.0.dist-info/RECORD +99 -0
- tsadmetrics-1.0.0.dist-info/top_level.txt +4 -0
- entorno/bin/activate_this.py +0 -32
- entorno/bin/rst2html.py +0 -23
- entorno/bin/rst2html4.py +0 -26
- entorno/bin/rst2html5.py +0 -33
- entorno/bin/rst2latex.py +0 -26
- entorno/bin/rst2man.py +0 -27
- entorno/bin/rst2odt.py +0 -28
- entorno/bin/rst2odt_prepstyles.py +0 -20
- entorno/bin/rst2pseudoxml.py +0 -23
- entorno/bin/rst2s5.py +0 -24
- entorno/bin/rst2xetex.py +0 -27
- entorno/bin/rst2xml.py +0 -23
- entorno/bin/rstpep2html.py +0 -25
- tests/test_binary.py +0 -946
- tests/test_non_binary.py +0 -450
- tests/test_utils.py +0 -49
- tsadmetrics/_tsadeval/affiliation/_affiliation_zone.py +0 -86
- tsadmetrics/_tsadeval/affiliation/_single_ground_truth_event.py +0 -68
- tsadmetrics/_tsadeval/affiliation/generics.py +0 -135
- tsadmetrics/_tsadeval/affiliation/metrics.py +0 -114
- tsadmetrics/_tsadeval/auc_roc_pr_plot.py +0 -295
- tsadmetrics/_tsadeval/discontinuity_graph.py +0 -109
- tsadmetrics/_tsadeval/eTaPR_pkg/DataManage/File_IO.py +0 -175
- tsadmetrics/_tsadeval/eTaPR_pkg/DataManage/Range.py +0 -50
- tsadmetrics/_tsadeval/eTaPR_pkg/DataManage/Time_Plot.py +0 -184
- tsadmetrics/_tsadeval/eTaPR_pkg/__init__.py +0 -0
- tsadmetrics/_tsadeval/eTaPR_pkg/etapr.py +0 -386
- tsadmetrics/_tsadeval/eTaPR_pkg/tapr.py +0 -362
- tsadmetrics/_tsadeval/metrics.py +0 -698
- tsadmetrics/_tsadeval/prts/__init__.py +0 -0
- tsadmetrics/_tsadeval/prts/base/__init__.py +0 -0
- tsadmetrics/_tsadeval/prts/base/time_series_metrics.py +0 -165
- tsadmetrics/_tsadeval/prts/basic_metrics_ts.py +0 -121
- tsadmetrics/_tsadeval/prts/time_series_metrics/__init__.py +0 -0
- tsadmetrics/_tsadeval/prts/time_series_metrics/fscore.py +0 -61
- tsadmetrics/_tsadeval/prts/time_series_metrics/precision.py +0 -86
- tsadmetrics/_tsadeval/prts/time_series_metrics/precision_recall.py +0 -21
- tsadmetrics/_tsadeval/prts/time_series_metrics/recall.py +0 -85
- tsadmetrics/_tsadeval/tests.py +0 -376
- tsadmetrics/_tsadeval/threshold_plt.py +0 -30
- tsadmetrics/_tsadeval/time_tolerant.py +0 -33
- tsadmetrics/binary_metrics.py +0 -1652
- tsadmetrics/metric_utils.py +0 -98
- tsadmetrics/non_binary_metrics.py +0 -372
- tsadmetrics/scripts/__init__.py +0 -0
- tsadmetrics/scripts/compute_metrics.py +0 -42
- tsadmetrics/utils.py +0 -124
- tsadmetrics/validation.py +0 -35
- tsadmetrics-0.1.17.dist-info/METADATA +0 -54
- tsadmetrics-0.1.17.dist-info/RECORD +0 -66
- tsadmetrics-0.1.17.dist-info/entry_points.txt +0 -2
- tsadmetrics-0.1.17.dist-info/top_level.txt +0 -6
- /tsadmetrics/{_tsadeval → base}/__init__.py +0 -0
- /tsadmetrics/{_tsadeval/affiliation → evaluation}/__init__.py +0 -0
- /tsadmetrics/{_tsadeval/eTaPR_pkg/DataManage → metrics/tem}/__init__.py +0 -0
- /tsadmetrics/{_tsadeval/vus_utils.py → utils/functions_vus.py} +0 -0
- {tsadmetrics-0.1.17.dist-info → tsadmetrics-1.0.0.dist-info}/WHEEL +0 -0
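The new layout groups every metric under tsadmetrics/metrics/ behind a Registry and a Runner, with a shared Metric base class in tsadmetrics/base/. The 1.0.0 API itself is not shown in this diff; the sketch below is only a hypothetical illustration of the registry pattern such a layout suggests — the class names, method names, and the "pointwise_f_score" key are assumptions, not the library's documented interface.

# Hypothetical sketch of a metric-registry pattern; NOT the tsadmetrics 1.0.0 API.
from abc import ABC, abstractmethod

class Metric(ABC):
    """Assumed base class: subclasses implement a single compute() method."""
    name = "base"

    @abstractmethod
    def compute(self, y_true, y_pred) -> float:
        ...

class Registry:
    """Assumed registry: maps metric names to metric classes."""
    def __init__(self):
        self._metrics = {}

    def register(self, cls):
        self._metrics[cls.name] = cls
        return cls

    def create(self, name, **params) -> Metric:
        return self._metrics[name](**params)

registry = Registry()

@registry.register
class PointwiseFScore(Metric):
    name = "pointwise_f_score"

    def __init__(self, beta: float = 1.0):
        self.beta = beta

    def compute(self, y_true, y_pred) -> float:
        # Plain point-wise F-beta over binary labels/predictions.
        tp = sum(1 for t, p in zip(y_true, y_pred) if t == 1 and p == 1)
        fp = sum(1 for t, p in zip(y_true, y_pred) if t == 0 and p == 1)
        fn = sum(1 for t, p in zip(y_true, y_pred) if t == 1 and p == 0)
        precision = tp / (tp + fp) if tp + fp else 0.0
        recall = tp / (tp + fn) if tp + fn else 0.0
        if precision + recall == 0:
            return 0.0
        b2 = self.beta ** 2
        return (1 + b2) * precision * recall / (b2 * precision + recall)

# Usage: build a metric by name and evaluate binary labels against predictions.
metric = registry.create("pointwise_f_score", beta=1.0)
print(metric.compute([0, 1, 1, 0, 1], [0, 1, 0, 0, 1]))  # 0.8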
tsadmetrics/_tsadeval/eTaPR_pkg/DataManage/File_IO.py (file removed)
@@ -1,175 +0,0 @@
-from .Range import Range
-import time
-import datetime
-import pandas as pd
-
-def load_stream_2_range(stream_data: list, normal_label: int, anomaly_label: int, is_range_name: bool) -> list:
-    return_list = []
-    start_id = -1
-    end_id = -1
-    id = 0
-    range_id = 1
-
-    prev_val = -2 #Set prev_val as a value different to normal and anomalous labels
-
-    for val in stream_data:
-        if val == anomaly_label and (prev_val == normal_label or prev_val == -2): #Enter the anomaly range
-            start_id = id
-        elif val == normal_label and prev_val == anomaly_label: #Go out the anomaly range
-            name_buf = ''
-            if is_range_name:
-                name_buf = str(range_id)
-            end_id = id - 1
-            return_list.append(Range(start_id, end_id, name_buf))
-            range_id += 1
-            #start_id = 0
-
-        id += 1
-        prev_val = val
-    if start_id > end_id: #start_id != 0 and start_id != -1: #if an anomaly continues till the last point
-        return_list.append(Range.Range(start_id, id - 1, str(range_id)))
-
-    return return_list
-
-
-def load_stream_file(filename: str, normal_label: int, anomaly_label: int, is_range_name: bool) -> list:
-    return_list = []
-    start_id = -1
-    end_id = -1
-    id = 0
-    range_id = 1
-    #is_first = True
-
-    prev_val = -2 #Set prev_val as a value different to normal and anomalous labels
-
-    f = open(filename, 'r', encoding='utf-8', newline='')
-
-    for line in f.readlines():
-        val = int(line.strip().split()[0])
-
-        '''
-        #skip the first line
-        if is_first:
-            if val == anomaly_label:
-                start_id = id
-            prev_val = val
-            is_first = False
-            continue
-        '''
-
-        if val == anomaly_label and (prev_val == normal_label or prev_val == -2): #Enter the anomaly range
-            start_id = id
-        elif val == normal_label and prev_val == anomaly_label: #Go out the anomaly range
-            name_buf = ''
-            if is_range_name:
-                name_buf = str(range_id)
-            end_id = id - 1
-            return_list.append(Range.Range(start_id, end_id, name_buf))
-            range_id += 1
-            #start_id = 0
-
-        id += 1
-        prev_val = val
-    f.close()
-    if start_id > end_id: #start_id != 0 and start_id != -1: #if an anomaly continues till the last point
-        return_list.append(Range.Range(start_id, id - 1, str(range_id)))
-
-    return return_list
-
-def load_range_file(filename: str, time_format: str) -> list:
-    return_list = []
-    #is_first = True
-
-    f = open(filename, 'r', encoding='utf-8', newline='')
-    for line in f.readlines():
-        # skip the first line
-        #if is_first:
-            #is_first = False
-            #continue
-
-        items = line.strip().split(',')
-        if time_format == 'index':
-            first_idx = int(items[0])
-            last_idx = int(items[1])
-        else:
-            first_idx = string_to_unixtime(items[0], time_format)
-            last_idx = string_to_unixtime(items[1], time_format)
-
-        name_buf = ''
-        if len(items) > 2:
-            name_buf = str(items[2])
-
-        return_list.append(Range.Range(first_idx, last_idx, name_buf))
-    f.close()
-
-    for idx in range(1, len(return_list)):
-        if return_list[idx].get_time()[0] <= return_list[idx-1].get_time()[1]:
-            print("Error: ranges ({},{}) and ({},{}) are overlapped in {}".format(return_list[idx-1].get_time()[0],
-                                                                                  return_list[idx-1].get_time()[1],
-                                                                                  return_list[idx].get_time()[0],
-                                                                                  return_list[idx].get_time()[1], filename))
-            exit(0)
-
-    return return_list
-
-
-def unixtime_to_string(epoch: int, format: str) -> str:
-    return datetime.datetime.fromtimestamp(epoch).strftime(format) #'%Y-%m-%d %I:%M:%S %p'
-
-
-def string_to_unixtime(timestamp: str, format: str) -> int:
-    return int(time.mktime(datetime.datetime.strptime(timestamp, format).timetuple()))
-
-
-def save_range_list(filename: str, range_list: list) -> None:
-    f = open(filename, encoding='utf-8', mode='w')
-    for single_range in range_list:
-        first, last = single_range.get_time()
-        f.writelines(str(first)+','+str(last)+','+single_range.get_name()+'\n')
-    f.close()
-
-# Assume that the first line of input files including the information of file format and its corresponding information
-# This function handles three types of file format
-def load_file(filename: str, filetype: str) -> list:
-    assert(filetype == 'range' or filetype == 'stream')
-
-    if filetype == 'stream':
-        return load_stream_file(filename, 1, -1, True)
-    elif filetype == 'range':
-        return load_range_file(filename, 'index')
-
-
-def make_attack_file(input_files: list, sep: str, label_featname: str, input_normal_label: int, input_anomalous_label: int,
-                     output_stream_file: str, output_range_file: str, output_normal_label: int, output_anomalous_label: int) -> None:
-    label = []
-    for an_input_file in input_files:
-        temp_file = pd.read_csv(an_input_file, sep=sep)
-        label += temp_file[label_featname].values.tolist()
-
-    with open(output_stream_file, 'w') as f:
-        for a_label in label:
-            if a_label == input_normal_label:
-                f.write('{}\n'.format(output_normal_label))
-            elif a_label == input_anomalous_label:
-                f.write('{}\n'.format(output_anomalous_label))
-            else:
-                print("There is an unknown label, " + a_label, flush=True)
-                f.close()
-                return
-
-    ranges = load_stream_2_range(label, 0, 1, False)
-    save_range_list(output_range_file, ranges)
-
-def save_range_2_stream(filename: str, range_list: list, last_idx: int, normal_label: int, anomalous_label: int) -> None:
-    f = open(filename, encoding='utf-8', mode='w')
-    range_id = 0
-    for idx in range(last_idx):
-        if idx < range_list[range_id].get_time()[0]:
-            f.writelines('{}\n'.format(normal_label))
-        elif range_list[range_id].get_time()[0] <= idx <= range_list[range_id].get_time()[1]:
-            f.writelines('{}\n'.format(anomalous_label))
-        else:
-            f.writelines('{}\n'.format(normal_label))
-            if range_id < len(range_list) - 1:
-                range_id += 1
-    f.close()
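The removed load_stream_2_range() collapses a per-point label stream into inclusive (start, end) anomaly ranges; the new package ships conversion helpers in tsadmetrics/utils/functions_conversion.py, whose contents are not shown in this diff. The following standalone sketch only illustrates the same labels-to-intervals idea and is not taken from that module.

# Standalone sketch of the label-stream -> anomaly-range conversion idea
# implemented by the removed load_stream_2_range(); not the library's code.
from typing import List, Tuple

def stream_to_ranges(labels: List[int], anomaly_label: int = 1) -> List[Tuple[int, int]]:
    """Collapse a per-point label stream into inclusive (start, end) anomaly ranges."""
    ranges = []
    start = None
    for i, val in enumerate(labels):
        if val == anomaly_label and start is None:
            start = i                      # entering an anomalous segment
        elif val != anomaly_label and start is not None:
            ranges.append((start, i - 1))  # leaving the segment
            start = None
    if start is not None:                  # anomaly runs to the last point
        ranges.append((start, len(labels) - 1))
    return ranges

# Example: two anomalous segments, the second running to the end of the stream.
print(stream_to_ranges([0, 1, 1, 0, 0, 1]))  # [(1, 2), (5, 5)]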
tsadmetrics/_tsadeval/eTaPR_pkg/DataManage/Range.py (file removed)
@@ -1,50 +0,0 @@
-# To store a single anomaly
-class Range:
-    def __init__(self, first, last, name):
-        self._first_timestamp = first
-        self._last_timestamp = last
-        self._name = name
-
-    def set_time(self, first, last):
-        self._first_timestamp = first
-        self._last_timestamp = last
-
-    def get_time(self):
-        return self._first_timestamp, self._last_timestamp
-
-    def set_name(self, str):
-        self._name = str
-
-    def get_name(self):
-        return self._name
-
-    def get_len(self):
-        return self._last_timestamp - self._first_timestamp + 1
-
-    def __eq__(self, other):
-        return self._first_timestamp == other.get_time()[0] and self._last_timestamp == other.get_time()[1]
-
-    def distance(self, other_range) -> int:
-        if min(self._last_timestamp, other_range.get_time()[1]) - max(self._first_timestamp, other_range.get_time()[0]) > 0:
-            return 0
-        else:
-            return min(abs(self._first_timestamp - other_range.get_time()[1]),
-                       abs(self._last_timestamp - other_range.get_time()[0]))
-
-    def compare(self, other_range) -> int:
-        if min(self._last_timestamp, other_range.get_time()[1]) - max(self._first_timestamp, other_range.get_time()[0]) > 0:
-            return 0
-        elif self._last_timestamp - other_range.get_time()[0] < 0:
-            return -1
-        else:
-            return 1
-
-    def stream_2_ranges(self, prediction_stream: list) -> list:
-        result = []
-        for i in range(len(prediction_stream)-1):
-            start_time = 0
-            if prediction_stream[i] == 0 and prediction_stream[i+1] == 1:
-                start_time = i+1
-            elif prediction_stream[i] == 1 and prediction_stream[i+1] == 0:
-                result.append(Range(start_time, i, ''))
-        return result
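The removed Range class models a named inclusive interval with overlap, distance, and ordering queries. As an illustration only (not part of the tsadmetrics 1.0.0 API), the same abstraction can be expressed as a small dataclass:

# Minimal sketch of the interval abstraction behind the removed Range class;
# written as a dataclass for illustration, not shipped with the package.
from dataclasses import dataclass

@dataclass
class Interval:
    first: int   # inclusive start index/timestamp
    last: int    # inclusive end index/timestamp
    name: str = ""

    def length(self) -> int:
        return self.last - self.first + 1

    def overlaps(self, other: "Interval") -> bool:
        # Same test used by Range.distance()/compare(): positive intersection length.
        return min(self.last, other.last) - max(self.first, other.first) > 0

    def distance(self, other: "Interval") -> int:
        # 0 if the intervals overlap, otherwise the gap between the closest endpoints.
        if self.overlaps(other):
            return 0
        return min(abs(self.first - other.last), abs(self.last - other.first))

# Example: (0, 5) and (8, 10) do not overlap; the gap between them is 3.
print(Interval(0, 5).distance(Interval(8, 10)))  # 3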
tsadmetrics/_tsadeval/eTaPR_pkg/DataManage/Time_Plot.py (file removed)
@@ -1,184 +0,0 @@
-import numpy as np
-import cv2 as cv
-from copy import deepcopy
-import pathlib
-
-def convert_index(org_index, max_index, graph_width, margin_left):
-    return round(float(org_index/max_index)*graph_width+margin_left)
-
-def draw_csv(ranges, img, h_floor, h_ceiling, color, max_index, graph_width, margin_left):
-    for a_range in ranges:
-        start_time = convert_index(a_range.get_time()[0], max_index, graph_width, margin_left)
-        end_time = convert_index(a_range.get_time()[1], max_index, graph_width, margin_left)
-        cv.rectangle(img, (start_time, h_floor), (end_time, h_ceiling), color, thickness=-1)
-
-def draw_csv_range(ranges, img, h_floor, h_ceiling, color, start, end):
-    for a_range in ranges:
-        if a_range.get_time()[0] <= end or a_range.get_time()[1] >= start:
-            cv.rectangle(img, (a_range.get_time()[0]-start+10, h_floor), (a_range.get_time()[1]-start+10, h_ceiling), color, thickness=-1)
-
-def shift_ranges(ranges, first_idx):
-    for a_range in ranges:
-        a_range.set_time(a_range.get_time()[0] - first_idx, a_range.get_time()[1] - first_idx)
-
-def draw_graphs(anomalies, predictions, how_show: str):
-    method_list = [ 'Anomalies', 'Predictions' ]
-    anomalies = deepcopy(anomalies)
-    predictions = deepcopy(predictions)
-    first_idx = min(anomalies[0].get_time()[0]-100, predictions[0].get_time()[0])
-    last_idx = max(anomalies[-1].get_time()[1], predictions[-1].get_time()[1])
-    marginal_idx = int(float(last_idx-first_idx)/100)
-    first_idx -= marginal_idx
-    shift_ranges(anomalies, first_idx)
-    shift_ranges(predictions, first_idx)
-    ranges_list = [ anomalies, predictions ]
-    max_index = max(anomalies[-1].get_time()[1], predictions[-1].get_time()[1]) + marginal_idx
-
-    color_list = [(70, 70, 70), #black
-                  (60, 76, 203), #red
-                  (193, 134, 46), #blue
-                  (133, 160, 22), #green
-                  (206, 143, 187), #purple
-                  (94, 73, 52), # darkblue
-                  (63, 208, 244) #yellow
-                  ]
-
-    margin_left = 10
-    margin_right = 150
-    margin_top = 20
-    margin_bottom = 40
-
-    graph_gap = 20
-    graph_height = 40
-    graph_width = 2000
-
-    n_results = 2
-
-    width = margin_left + graph_width + margin_right
-    height = margin_top + margin_bottom + n_results * (graph_gap + graph_height)
-    bpp = 3
-
-    img = np.ones((height, width, bpp), np.uint8)*255
-
-    img_h = img.shape[0]
-    img_w = img.shape[1]
-    img_bpp = img.shape[2]
-
-    thickness = 1
-    fontsize = 1
-    cv.line(img, (int(margin_left/2), img_h-margin_bottom), (img_w-int(margin_left/2), img_h-margin_bottom), color_list[0], thickness) #x-axis
-    pts = np.array([[img_w-int(margin_left/2), img_h-margin_bottom], [img_w-int(margin_left/2)-7, img_h-margin_bottom+5], [img_w-int(margin_left/2)-7, img_h-margin_bottom-5]], np.int32) #arrow_head
-    pts = pts.reshape((-1, 1, 2))
-    cv.fillPoly(img, [pts], color_list[0])
-    cv.putText(img, 'Relative Index', (img_w-180, img_h-15), cv.FONT_HERSHEY_COMPLEX_SMALL, fontsize, color_list[0], 1, cv.LINE_AA) #x-axis label
-
-    for i in range(margin_left, width-margin_right, int(graph_width/10)):
-        cv.line(img, (i, img_h-margin_bottom+2), (i, img_h-margin_bottom-2), color_list[0], thickness)
-        org_index = str(round((i-10) / graph_width * max_index / 1000))
-        cv.putText(img, org_index+'K', (i-len(org_index)*5, img_h-margin_bottom + 25), cv.FONT_HERSHEY_COMPLEX_SMALL, fontsize, color_list[0], 1, cv.LINE_AA)
-
-    thickness = -1
-    for idx in range(n_results):
-        cv.putText(img, method_list[idx],
-                   (width - margin_right + 2, img_h - margin_bottom - graph_gap * (idx+1) - graph_height * idx - 12),
-                   cv.FONT_HERSHEY_COMPLEX_SMALL, fontsize, color_list[0], 1, cv.LINE_AA)
-        draw_csv(ranges_list[idx], img, h_floor=img_h - margin_bottom - graph_gap * (idx+1) - graph_height * idx,
-                 h_ceiling=img_h - margin_bottom - graph_gap * (idx+1) - graph_height * (idx+1),
-                 color=color_list[(idx+1)%len(color_list)],
-                 max_index=max_index, graph_width=graph_width, margin_left=margin_left)
-
-    if how_show == 'screen' or how_show == 'all':
-        cv.imshow("drawing", img)
-    if how_show == 'file' or how_show == 'all':
-        cv.imwrite("../../brief_result.png", img)
-    if how_show != 'screen' and how_show != 'all' and how_show != 'file':
-        print('Parameter Error')
-    cv.waitKey(0);
-
-
-def draw_multi_graphs(anomalies, predictions_list, predictions_name_list, how_show: str):
-    method_list = [ 'Anomalies' ] + predictions_name_list
-
-    anomalies = deepcopy(anomalies)
-    predictions_list = deepcopy(predictions_list)
-
-    first_idx = anomalies[0].get_time()[0]-100
-    last_idx = anomalies[-1].get_time()[1]
-    for single_prediction in predictions_list:
-        first_idx = min(first_idx, single_prediction[0].get_time()[0])
-        last_idx = max(last_idx, single_prediction[-1].get_time()[1])
-
-    marginal_idx = int(float(last_idx-first_idx)/100)
-    first_idx -= marginal_idx
-    shift_ranges(anomalies, first_idx)
-    for single_prediction in predictions_list:
-        shift_ranges(single_prediction, first_idx)
-
-    ranges_list = [ anomalies ] + predictions_list
-
-    max_index = anomalies[-1].get_time()[1]
-    for single_prediction in predictions_list:
-        max_index = max(max_index, single_prediction[-1].get_time()[1])
-    max_index = max_index + marginal_idx
-
-    color_list = [(0, 0, 0), #black
-                  (60, 76, 203), #red
-                  (193, 134, 46), #blue
-                  (133, 160, 22), #green
-                  (206, 143, 187), #purple
-                  (94, 73, 52), # darkblue
-                  (63, 208, 244) #yellow
-                  ]
-
-    margin_left = 10
-    margin_right = 180
-    margin_top = 20
-    margin_bottom = 40
-
-    graph_gap = 20
-    graph_height = 40
-    graph_width = 2000
-
-    n_results = len(ranges_list)
-
-    width = margin_left + graph_width + margin_right
-    height = margin_top + margin_bottom + n_results * (graph_gap + graph_height)
-    bpp = 3
-
-    img = np.ones((height, width, bpp), np.uint8)*255
-
-    img_h = img.shape[0]
-    img_w = img.shape[1]
-    img_bpp = img.shape[2]
-
-    thickness = 1
-    fontsize = 1.4
-    cv.line(img, (int(margin_left/2), img_h-margin_bottom), (img_w-int(margin_left/2), img_h-margin_bottom), color_list[0], thickness) #x-axis
-    pts = np.array([[img_w-int(margin_left/2), img_h-margin_bottom], [img_w-int(margin_left/2)-7, img_h-margin_bottom+5], [img_w-int(margin_left/2)-7, img_h-margin_bottom-5]], np.int32) #arrow_head
-    pts = pts.reshape((-1, 1, 2))
-    cv.fillPoly(img, [pts], color_list[0])
-    cv.putText(img, 'Relative Index', (img_w-180, img_h-15), cv.FONT_HERSHEY_COMPLEX_SMALL, 1, color_list[0], 1, cv.LINE_AA) #x-axis label
-
-    for i in range(margin_left, width-margin_right, int(graph_width/10)):
-        cv.line(img, (i, img_h-margin_bottom+2), (i, img_h-margin_bottom-2), color_list[0], thickness)
-        org_index = str(round((i-10) / graph_width * max_index / 1000))
-        cv.putText(img, org_index+'K', (i-len(org_index)*5, img_h-margin_bottom + 25), cv.FONT_HERSHEY_COMPLEX_SMALL, 1, color_list[0], 1, cv.LINE_AA)
-
-    thickness = -1
-    for idx in range(n_results):
-        cv.putText(img, method_list[idx],
-                   (width - margin_right + 2, img_h - margin_bottom - graph_gap * (idx+1) - graph_height * idx - 12),
-                   cv.FONT_HERSHEY_COMPLEX_SMALL, fontsize, color_list[0], 1, cv.LINE_AA)
-        draw_csv(ranges_list[idx], img, h_floor=img_h - margin_bottom - graph_gap * (idx+1) - graph_height * idx,
-                 h_ceiling=img_h - margin_bottom - graph_gap * (idx+1) - graph_height * (idx+1),
-                 color=color_list[(idx+1)%len(color_list)],
-                 max_index=max_index, graph_width=graph_width, margin_left=margin_left)
-
-    if how_show == 'screen' or how_show == 'all':
-        cv.imshow("drawing", img)
-    if how_show == 'file' or how_show == 'all':
-        print("The file is saved at " + str(pathlib.Path(__file__).parent.absolute()))
-        cv.imwrite("./brief_result.png", img)
-    if how_show != 'screen' and how_show != 'all' and how_show != 'file':
-        print('Parameter Error')
-    cv.waitKey(0);
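The removed Time_Plot.py rendered ground-truth and predicted anomaly ranges as colored bars with OpenCV. A roughly equivalent figure can be produced with matplotlib; the sketch below is an illustration only and is not shipped with tsadmetrics 1.0.0.

# Hedged sketch: matplotlib equivalent of the removed OpenCV range plot.
import matplotlib.pyplot as plt

def plot_ranges(anomalies, predictions, path="brief_result.png"):
    """anomalies/predictions: lists of inclusive (start, end) index pairs."""
    fig, ax = plt.subplots(figsize=(12, 2))
    # broken_barh takes (start, width) pairs; width = end - start + 1 for inclusive ends.
    ax.broken_barh([(s, e - s + 1) for s, e in anomalies], (10, 8), facecolors="tab:red")
    ax.broken_barh([(s, e - s + 1) for s, e in predictions], (0, 8), facecolors="tab:blue")
    ax.set_yticks([4, 14])
    ax.set_yticklabels(["Predictions", "Anomalies"])
    ax.set_xlabel("Index")
    fig.savefig(path, bbox_inches="tight")
    plt.close(fig)

# Example: one long and one short anomaly against two predicted segments.
plot_ranges([(100, 180), (400, 420)], [(110, 170), (300, 330)])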