radnn 0.0.8__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- radnn/__init__.py +5 -5
- radnn/benchmark/__init__.py +1 -0
- radnn/benchmark/latency.py +55 -0
- radnn/core.py +146 -2
- radnn/data/__init__.py +5 -10
- radnn/data/dataset_base.py +100 -260
- radnn/data/dataset_base_legacy.py +280 -0
- radnn/data/errors.py +32 -0
- radnn/data/sample_preprocessor.py +58 -0
- radnn/data/sample_set.py +203 -90
- radnn/data/sample_set_kind.py +126 -0
- radnn/data/sequence_dataset.py +25 -30
- radnn/data/structs/__init__.py +1 -0
- radnn/data/structs/tree.py +322 -0
- radnn/data_beta/__init__.py +12 -0
- radnn/{data → data_beta}/data_feed.py +1 -1
- radnn/data_beta/dataset_base.py +337 -0
- radnn/data_beta/sample_set.py +166 -0
- radnn/data_beta/sequence_dataset.py +134 -0
- radnn/data_beta/structures/__init__.py +2 -0
- radnn/data_beta/structures/dictionary.py +41 -0
- radnn/{data → data_beta}/tf_classification_data_feed.py +5 -2
- radnn/errors.py +10 -2
- radnn/experiment/__init__.py +2 -0
- radnn/experiment/identification.py +7 -0
- radnn/experiment/ml_experiment.py +7 -2
- radnn/experiment/ml_experiment_log.py +47 -0
- radnn/images/image_processor.py +4 -1
- radnn/learn/__init__.py +0 -7
- radnn/learn/keras/__init__.py +4 -0
- radnn/learn/{state → keras}/keras_best_state_saver.py +5 -1
- radnn/learn/{learning_algorithm.py → keras/keras_learning_algorithm.py} +5 -9
- radnn/learn/{keras_learning_rate_scheduler.py → keras/keras_learning_rate_scheduler.py} +4 -1
- radnn/learn/{keras_optimization_algorithm.py → keras/keras_optimization_combo.py} +7 -3
- radnn/learn/torch/__init__.py +3 -0
- radnn/learn/torch/ml_model_freezer.py +330 -0
- radnn/learn/torch/ml_trainer.py +461 -0
- radnn/learn/torch/staircase_lr_scheduler.py +21 -0
- radnn/ml_system.py +68 -52
- radnn/models/__init__.py +5 -0
- radnn/models/cnn/__init__.py +0 -0
- radnn/models/cnn/cnn_stem_setup.py +35 -0
- radnn/models/model_factory.py +85 -0
- radnn/models/model_hyperparams.py +128 -0
- radnn/models/model_info.py +91 -0
- radnn/plots/plot_learning_curve.py +19 -8
- radnn/system/__init__.py +1 -0
- radnn/system/files/__init__.py +1 -1
- radnn/system/files/csvfile.py +37 -5
- radnn/system/files/filelist.py +30 -0
- radnn/system/files/fileobject.py +11 -1
- radnn/system/files/imgfile.py +1 -1
- radnn/system/files/jsonfile.py +37 -9
- radnn/system/files/picklefile.py +3 -3
- radnn/system/files/textfile.py +39 -10
- radnn/system/files/zipfile.py +96 -0
- radnn/system/filestore.py +147 -47
- radnn/system/filesystem.py +3 -3
- radnn/test/__init__.py +1 -0
- radnn/test/tensor_hash.py +130 -0
- radnn/utils.py +16 -2
- radnn-0.1.0.dist-info/METADATA +30 -0
- radnn-0.1.0.dist-info/RECORD +99 -0
- {radnn-0.0.8.dist-info → radnn-0.1.0.dist-info}/WHEEL +1 -1
- {radnn-0.0.8.dist-info → radnn-0.1.0.dist-info/licenses}/LICENSE.txt +1 -1
- radnn/learn/state/__init__.py +0 -4
- radnn-0.0.8.dist-info/METADATA +0 -58
- radnn-0.0.8.dist-info/RECORD +0 -70
- /radnn/{data → data_beta}/dataset_folder.py +0 -0
- /radnn/{data → data_beta}/image_dataset.py +0 -0
- /radnn/{data → data_beta}/image_dataset_files.py +0 -0
- /radnn/{data → data_beta}/preprocess/__init__.py +0 -0
- /radnn/{data → data_beta}/preprocess/normalizer.py +0 -0
- /radnn/{data → data_beta}/preprocess/standardizer.py +0 -0
- /radnn/{data → data_beta}/subset_type.py +0 -0
- {radnn-0.0.8.dist-info → radnn-0.1.0.dist-info}/top_level.txt +0 -0

radnn/data_beta/sample_set.py
@@ -0,0 +1,166 @@
+# ......................................................................................
+# MIT License
+
+# Copyright (c) 2019-2025 Pantelis I. Kaplanoglou
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+# ......................................................................................
+from .dataset_base import DataSetBase
+from .subset_type import SubsetType
+
+
+
+class SampleSet(object):
+  # --------------------------------------------------------------------------------------------------------------------
+  def __init__(self, subset_type="custom", has_ids=False):
+    self.subset_type: SubsetType = SubsetType(subset_type)
+    self.parent_dataset = None
+
+    self.has_ids = has_ids
+
+    self.ids = None
+    self.samples = None
+    self.sample_count = None
+    self.labels = None
+
+    self._step = 1
+    self._iter_start_pos = 0
+    self._iter_counter = 0
+
+    self.feed = None
+
+  # --------------------------------------------------------------------------------------------------------------------
+  @property
+  def has_labels(self):
+    return self.labels is not None
+  # --------------------------------------------------------------------------------------------------------------------
+  @property
+  def data_tuple(self):
+    if self.has_ids:
+      if self.labels is None:
+        return (self.ids, self.samples)
+      else:
+        return (self.ids, self.samples, self.labels)
+    else:
+      if self.labels is None:
+        return self.samples
+      else:
+        return (self.ids, self.samples, self.labels)
+  # --------------------------------------------------------------------------------------------------------------------
+  def subset_of(self, parent_dataset: DataSetBase):
+    self.parent_dataset = parent_dataset
+    if self.parent_dataset is not None:
+      if self.subset_type.is_training_set:
+        if self.parent_dataset.ts_samples is not None:
+          self.parent_dataset.ts = self
+          self.ids = self.parent_dataset.ts_sample_ids
+          self.samples = self.parent_dataset.ts_samples
+          self.sample_count = self.parent_dataset.ts_sample_count
+          self.labels = self.parent_dataset.ts_labels
+      elif self.subset_type.is_validation_set:
+        if self.parent_dataset.vs_samples is not None:
+          self.parent_dataset.vs = self
+          self.ids = self.parent_dataset.vs_sample_ids
+          self.samples = self.parent_dataset.vs_samples
+          self.sample_count = self.parent_dataset.vs_sample_count
+          self.labels = self.parent_dataset.vs_labels
+      elif self.subset_type.is_unknown_test_set:
+        if self.parent_dataset.ut_samples is not None:
+          self.parent_dataset.ut = self
+          self.ids = self.parent_dataset.ut_sample_ids
+          self.samples = self.parent_dataset.ut_samples
+          self.sample_count = self.parent_dataset.ut_sample_count
+          self.labels = self.parent_dataset.ut_labels
+
+    self.has_ids = self.ids is not None
+  # --------------------------------------------------------------------------------------------------------------------
+  '''
+  def create_feed(self, has_ids=False):
+    self.has_ids = has_ids
+    if is_tensorflow_installed:
+      import tensorflow as tf
+
+      if has_ids:
+        self.feed = tf.data.Dataset.from_tensor_slices((self.ids, self.samples, self.labels))
+      else:
+        self.feed = tf.data.Dataset.from_tensor_slices((self.samples, self.labels))
+
+      self.feed = self.feed.map(preprocess_tf, num_parallel_calls=8)
+
+      if (self.subset_type == "training") or (self.subset_type == "train") or (self.subset_type == "ts"):
+  # -----------------------------------------------------------------------------------
+  def preprocess_tf(self, sample_pack):
+
+    import tensorflow as tf
+
+    if self.has_ids:
+      nId, nSample, nLabel = sample_pack
+    else:
+      nSample, nLabel = sample_pack
+
+    tImage = tf.cast(p_tImageInVS, tf.float32) # //[BF] overflow of standardization
+    tNormalizedImage = self.normalizeImage(tImage)
+
+    tTargetOneHot = tf.one_hot(p_tLabelInVS, self.ClassCount)
+
+    return tNormalizedImage, tTargetOneHot
+  '''
+
+  # --------------------------------------------------------------------------------------------------------------------
+  def __iter__(self):
+    self._iter_counter = 0
+    if self.ids is not None:
+      if self.labels is not None:
+        yield from self._generator_for_supervised_with_ids()
+      else:
+        yield from self._generator_for_unsupervised_with_ids()
+    else:
+      if self.labels is not None:
+        yield from self._generator_for_supervised()
+      else:
+        yield from self._generator_for_unsupervised()
+  # --------------------------------------------------------------------------------------------------------------------
+  def _generator_for_supervised(self):
+    nIndex = self._iter_start_pos
+    while self._iter_counter < self.sample_count:
+      yield (self.samples[nIndex, ...], self.labels[nIndex, ...])
+      nIndex += self._step
+  # --------------------------------------------------------------------------------------------------------------------
+  def _generator_for_unsupervised(self):
+    nIndex = self._iter_start_pos
+    while self._iter_counter < self.sample_count:
+      yield self.samples[nIndex, ...]
+      nIndex += self._step
+  # --------------------------------------------------------------------------------------------------------------------
+  def _generator_for_supervised_with_ids(self):
+    nIndex = self._iter_start_pos
+    while self._iter_counter < self.sample_count:
+      yield (self.ids[nIndex], self.samples[nIndex, ...], self.labels[nIndex, ...])
+      nIndex += self._step
+  # --------------------------------------------------------------------------------------------------------------------
+  def _generator_for_unsupervised_with_ids(self):
+    nIndex = self._iter_start_pos
+    while self._iter_counter < self.sample_count:
+      yield (self.ids[nIndex], self.samples[nIndex, ...])
+      nIndex += self._step
+  # --------------------------------------------------------------------------------------------------------------------
+
+
+
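The new `SampleSet` exposes labelled or unlabelled data either as a single `data_tuple` or through Python iteration, which dispatches to one of four generators depending on whether ids and labels are present. Below is a minimal usage sketch, assuming that a `SampleSet` can be populated by assigning `samples`, `labels`, and `sample_count` directly (outside of `subset_of`) and that `"training"` is an accepted `SubsetType` value; none of these specifics are shown in the diff.

```python
# Minimal sketch, not from the package: arrays, subset name, and direct attribute
# assignment are illustrative assumptions.
from itertools import islice
import numpy as np
from radnn.data_beta.sample_set import SampleSet  # assumed import path for the new module

oTrainSet = SampleSet(subset_type="training")          # "training" assumed to be a valid SubsetType value
oTrainSet.samples = np.random.rand(8, 32, 32, 3).astype(np.float32)
oTrainSet.labels = np.random.randint(0, 10, size=(8,))
oTrainSet.sample_count = oTrainSet.samples.shape[0]

print(oTrainSet.has_labels)        # True, since labels were assigned

# As written, the labelled branch of data_tuple returns an (ids, samples, labels)
# triple even when has_ids is False, so ids comes back as None here.
_, nSamples, nLabels = oTrainSet.data_tuple

# Iteration yields (sample, label) pairs via _generator_for_supervised; islice keeps
# the sketch bounded because the internal counter is not advanced by the generators.
for nSample, nLabel in islice(oTrainSet, 3):
  print(nSample.shape, int(nLabel))
```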
radnn/data_beta/sequence_dataset.py
@@ -0,0 +1,134 @@
+# ......................................................................................
+# MIT License
+
+# Copyright (c) 2022-2025 Pantelis I. Kaplanoglou
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+# ......................................................................................
+import numpy as np
+from .dataset_base import DataSetBase
+
+
+# ----------------------------------------------------------------------------------------------------------------------
+def generate_sequence_clips(samples, labels, window_size, stride, is_padding_zeros=False):
+  nSequenceIndex = 0
+  while nSequenceIndex < samples.shape[0]:
+    nLabel = labels[nSequenceIndex]
+    nPosition = 0
+    nSpanPoints = window_size
+    if is_padding_zeros:
+      nSpanPoints = window_size - 3 * stride
+
+    nDataPointCount = samples.shape[1]
+    while (nPosition + nSpanPoints) <= nDataPointCount:
+      if is_padding_zeros and ((nPosition + window_size) >= nDataPointCount):
+        nSeqSample = np.zeros((window_size, samples.shape[2]), np.float32)
+        nSeqSample[nPosition + window_size - nDataPointCount:, :] = samples[nSequenceIndex, nPosition:, :]
+      else:
+        nSeqSample = samples[nSequenceIndex, nPosition:nPosition + window_size, :]
+
+      yield (nSeqSample, nLabel)
+
+      nPosition += stride
+    nSequenceIndex += 1
+# ----------------------------------------------------------------------------------------------------------------------
+
+
+
+
+
+class SequenceDataset(DataSetBase):
+  # --------------------------------------------------------------------------------------------------------------------
+  def __init__(self, name, fs, clip_window_size=None, clip_stride=None, is_padding_zeros=False, random_seed=None, is_classification=True):
+    super(SequenceDataset, self).__init__(name, fs, random_seed, is_classification)
+    self.clip_window_size = clip_window_size
+    self.clip_stride = clip_stride
+    self.is_padding_zeros = is_padding_zeros
+    self.card["clips.window_size"] = self.clip_window_size
+    self.card["clips.stride"] = self.clip_stride
+    self.card["clips.is_padding_zeros"] = self.is_padding_zeros
+  # --------------------------------------------------------------------------------------------------------------------
+  @property
+  def ts_sequence_clips(self):
+    return generate_sequence_clips(self.ts_samples, self.ts_labels, self.clip_window_size, self.clip_stride,
+                                   self.is_padding_zeros)
+  # --------------------------------------------------------------------------------------------------------------------
+  @property
+  def vs_sequence_clips(self):
+    if self.vs_samples is not None:
+      return generate_sequence_clips(self.vs_samples, self.vs_labels, self.clip_window_size, self.clip_stride,
+                                     self.is_padding_zeros)
+    else:
+      return None
+  # --------------------------------------------------------------------------------------------------------------------
+  @property
+  def ut_sequence_clips(self):
+    if self.ut_samples is not None:
+      return generate_sequence_clips(self.ut_samples, self.ut_labels, self.clip_window_size, self.clip_stride,
+                                     self.is_padding_zeros)
+    else:
+      return None
+  # --------------------------------------------------------------------------------------------------------------------
+  def convert_samples_to_clips(self, clip_window_size=None, clip_stride=None, is_padding_zeros=False):
+    if clip_window_size is not None:
+      self.clip_window_size = clip_window_size
+    if clip_stride is not None:
+      self.clip_stride = clip_stride
+    if is_padding_zeros and (not self.is_padding_zeros):
+      self.is_padding_zeros = is_padding_zeros
+
+    self.card["clips.window_size"] = self.clip_window_size
+    self.card["clips.stride"] = self.clip_stride
+    self.card["clips.is_padding_zeros"] = self.is_padding_zeros
+
+    # Create training set clips
+    nClips = []
+    nClipLabels = []
+    for (nClip, nClipLabel) in self.ts_sequence_clips:
+      nClips.append(nClip)
+      nClipLabels.append(nClipLabel)
+    nClips = np.asarray(nClips)
+    nClipLabels = np.asarray(nClipLabels)
+    self.assign_training_set(nClips, nClipLabels)
+
+    # Create validation set clips
+    if self.vs_samples is not None:
+      nClips = []
+      nClipLabels = []
+      for (nClip, nClipLabel) in self.vs_sequence_clips:
+        nClips.append(nClip)
+        nClipLabels.append(nClipLabel)
+      nClips = np.asarray(nClips)
+      nClipLabels = np.asarray(nClipLabels)
+      self.assign_validation_set(nClips, nClipLabels)
+
+    # Create unknown test set clips
+    if self.ut_samples is not None:
+      nClips = []
+      nClipLabels = []
+      for (nClip, nClipLabel) in self.ut_sequence_clips:
+        nClips.append(nClip)
+        nClipLabels.append(nClipLabel)
+      nClips = np.asarray(nClips)
+      nClipLabels = np.asarray(nClipLabels)
+      self.assign_unknown_test_set(nClips, nClipLabels)
+
+    return self
+  # --------------------------------------------------------------------------------------
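`generate_sequence_clips` slides a window of `window_size` time steps over each sequence with the given `stride` and yields one `(clip, label)` pair per position; with `is_padding_zeros` the final, partially covered window is zero-padded. The standalone sketch below uses synthetic data; the array shapes and parameter values are illustrative assumptions.

```python
# Self-contained sketch of the clip generator on synthetic data; shapes, window size,
# and stride are illustrative, not values taken from the diff.
import numpy as np
from radnn.data_beta.sequence_dataset import generate_sequence_clips  # assumed import path

nSamples = np.random.rand(2, 100, 6).astype(np.float32)  # 2 sequences, 100 time steps, 6 features
nLabels = np.array([0, 1])

# window_size=20, stride=10 -> windows start at 0, 10, ..., 80, i.e. 9 clips per sequence
nClipPairs = list(generate_sequence_clips(nSamples, nLabels, window_size=20, stride=10))
print(len(nClipPairs))          # 18 clips in total
nClip, nLabel = nClipPairs[0]
print(nClip.shape, nLabel)      # (20, 6) 0
```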
radnn/data_beta/structures/dictionary.py
@@ -0,0 +1,41 @@
+# ......................................................................................
+# MIT License
+
+# Copyright (c) 2013 Andy Hayden
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+# ......................................................................................
+# Based on the blog post: https://andyhayden.com/2013/dotable-dictionaries
+class Dictionary(dict):
+  __getattr__ = dict.__getitem__
+
+  def __init__(self, d):
+    super().__init__()
+    self.update(**dict((k, self.parse(v))
+                       for k, v in d.iteritems()))
+
+  @classmethod
+  def parse(cls, v):
+    if isinstance(v, dict):
+      return cls(v)
+    elif isinstance(v, list):
+      return [cls.parse(i) for i in v]
+    else:
+      return v
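`Dictionary` makes nested dict values readable with attribute access. Note that its constructor calls `d.iteritems()`, which only exists in Python 2; the sketch below is a hypothetical Python 3 variant using `d.items()`, shown purely to illustrate the intended behaviour, not the released code.

```python
# Hypothetical Python 3 variant of the dotable-dictionary idea; the released class
# calls d.iteritems(), which Python 3 dicts do not provide.
class Py3Dictionary(dict):
  __getattr__ = dict.__getitem__

  def __init__(self, d):
    super().__init__()
    self.update(**{k: self.parse(v) for k, v in d.items()})

  @classmethod
  def parse(cls, v):
    if isinstance(v, dict):
      return cls(v)                       # nested dicts stay attribute-accessible
    elif isinstance(v, list):
      return [cls.parse(i) for i in v]
    else:
      return v

oConfig = Py3Dictionary({"model": {"layers": [32, 64]}, "lr": 0.001})
print(oConfig.model.layers)   # [32, 64]
print(oConfig.lr)             # 0.001
```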
radnn/{data → data_beta}/tf_classification_data_feed.py
@@ -28,9 +28,12 @@
 
 # ......................................................................................
 
-
+from radnn.core import RequiredLibs
+oReqs = RequiredLibs()
+if oReqs.is_tensorflow_installed:
+  import tensorflow as tf
 from radnn import mlsys
-from radnn.
+from radnn.data_beta.preprocess import Normalizer, Standardizer
 from .data_feed import DataFeed
 
 class TFClassificationDataFeed(DataFeed):
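This hunk introduces the pattern repeated throughout the release: TensorFlow is imported only when `RequiredLibs().is_tensorflow_installed` reports that it is available. `RequiredLibs` itself lives in `radnn/core.py` (+146 lines), whose body is not shown here, so the sketch below is only an assumption about how such a guard is commonly built, not the actual radnn implementation.

```python
# Hypothetical sketch of an optional-dependency guard in the spirit of
# radnn.core.RequiredLibs; the real implementation is not visible in this diff.
from importlib.util import find_spec

class RequiredLibsSketch:
  @property
  def is_tensorflow_installed(self) -> bool:
    # find_spec returns None when the package cannot be located
    return find_spec("tensorflow") is not None

  @property
  def is_torch_installed(self) -> bool:
    return find_spec("torch") is not None

oReqs = RequiredLibsSketch()
if oReqs.is_tensorflow_installed:
  import tensorflow as tf   # imported only when the optional dependency exists
```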
radnn/errors.py
CHANGED
@@ -6,7 +6,7 @@
 # ______________________________________________________________________________________
 # ......................................................................................
 
-# Copyright (c) 2018-
+# Copyright (c) 2018-2026 Pantelis I. Kaplanoglou
 
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -28,4 +28,12 @@
 
 # .......................................................................................
 class Errors:
-  MLSYS_NO_FILESYS = "No file system defined for the machine learning system. Create the object and assign the mlsys.filesys property."
+  MLSYS_NO_FILESYS = "No file system defined for the machine learning system. Create the object and assign the mlsys.filesys property."
+
+
+  HPARAMS_DATA_INPUT_DIMS = "Invalid data hyperparameters: The sample rank dimensions for the model's input tensor have not been defined."
+
+  FILESTORE_DATAFILE_KIND_NOT_SUPPORTED = "Data file kind {%s} is not supported."
+  TRAINER_LR_SCHEDULER_INVALID_SETUP = "The learning rate scheduler step list is missing or invalid."
+  TRAINER_LR_SCHEDULER_INVALID_MILESTONE_SETUP = "The learning rate change milestone list is missing or invalid."
+  TRAINER_LR_SCHEDULER_UNSUPPORTED = "The learning rate scheduler is not supported."
radnn/experiment/__init__.py
CHANGED
@@ -1,4 +1,6 @@
 from .ml_experiment_config import get_experiment_code, get_experiment_code_ex, experiment_number_and_variation, experiment_code_and_timestamp
 from .ml_experiment_config import MLExperimentConfig
 from .ml_experiment_env import MLExperimentEnv
+from .ml_experiment_log import MLExperimentLog
+from .identification import experiment_fold_number, experiment_name_with_fold
 
radnn/experiment/identification.py
@@ -0,0 +1,7 @@
+def experiment_fold_number(hyperparams: dict):
+  return hyperparams.get("Experiment.FoldNumber", hyperparams.get("Dataset.FoldNumber", 0))
+
+def experiment_name_with_fold(hyperparams: dict):
+  nFoldNumber = hyperparams.get("Experiment.FoldNumber", hyperparams.get("Dataset.FoldNumber", 0))
+  sExperimentName = hyperparams.get("Experiment.Name", "noname")
+  return f"{sExperimentName}.{nFoldNumber}"
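Both helpers resolve the fold number from an `Experiment.FoldNumber` key, falling back to `Dataset.FoldNumber` and then to 0. A quick usage sketch (the hyperparameter values are invented; the import path follows the updated `radnn/experiment/__init__.py`):

```python
# Usage sketch with an invented hyperparameter dict; only the key names come from the diff.
from radnn.experiment import experiment_fold_number, experiment_name_with_fold

hparams = {"Experiment.Name": "cifar10-cnn", "Dataset.FoldNumber": 2}
print(experiment_fold_number(hparams))     # 2, taken from the Dataset.FoldNumber fallback
print(experiment_name_with_fold(hparams))  # "cifar10-cnn.2"

print(experiment_name_with_fold({}))       # "noname.0" when neither key is present
```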
radnn/experiment/ml_experiment.py
CHANGED
@@ -30,10 +30,15 @@
 import os
 import numpy as np
 from datetime import datetime
-
+
+from radnn.core import RequiredLibs
+oReqs = RequiredLibs()
+if oReqs.is_tensorflow_installed:
+  from tensorflow import keras
+
 from radnn import MLSystem, FileSystem, FileStore, Errors
 from radnn.data import DataSetBase
-from radnn.learn import
+from radnn.learn.keras import KLearningAlgorithm
 
 from radnn.plots import PlotLearningCurve
 from radnn.evaluation import EvaluateClassification
radnn/experiment/ml_experiment_log.py
@@ -0,0 +1,47 @@
+from radnn.system.filesystem import FileStore
+class MLExperimentLog:
+  # --------------------------------------------------------------------------------------------------------------------
+  def __init__(self, filename: str, experiment_info: dict | None = None):
+    self.filename = filename
+    if experiment_info is None:
+      experiment_info = {}
+    self.experiment_info = experiment_info
+    self.logs = { "experiment": experiment_info,
+                  "epoch": [],
+                  "epoch_time": [],
+                  "train_step_loss": [],
+                  "train_step_accuracy": [],
+                  "train_loss": [],
+                  "train_accuracy": [],
+                  "val_loss": [],
+                  "val_accuracy": [],
+                }
+  # --------------------------------------------------------------------------------------------------------------------
+  def assign_series(self, is_autoinit=False, **kwargs):
+    if is_autoinit:
+      for key, value in kwargs.items():
+        if key not in self.logs:
+          self.logs[key] = []
+
+    for key, value in kwargs.items():
+      self.logs[key] = value
+  # --------------------------------------------------------------------------------------------------------------------
+  def append(self, is_autoinit=False, **kwargs):
+    if is_autoinit:
+      for key, value in kwargs.items():
+        if key not in self.logs:
+          self.logs[key] = []
+
+    for key, value in kwargs.items():
+      self.logs[key].append(value)
+    return self
+  # --------------------------------------------------------------------------------------------------------------------
+  def load(self, experiment_fs: FileStore):
+    self.logs = experiment_fs.json.load(self.filename)
+    return self
+  # --------------------------------------------------------------------------------------------------------------------
+  def save(self, experiment_fs: FileStore):
+    experiment_fs.json.save(self.logs, self.filename)
+    return self
+  # --------------------------------------------------------------------------------------------------------------------
+
radnn/images/image_processor.py
CHANGED
radnn/learn/__init__.py
CHANGED

radnn/learn/{state → keras}/keras_best_state_saver.py
@@ -1,5 +1,9 @@
 import os
-
+
+from radnn.core import RequiredLibs
+oReqs = RequiredLibs()
+if oReqs.is_tensorflow_installed:
+  import tensorflow.keras as ker
 
 class KBestStateSaver(object):
   # -----------------------------------------------------------------------------------
radnn/learn/{learning_algorithm.py → keras/keras_learning_algorithm.py}
@@ -1,10 +1,6 @@
-from radnn import
+from radnn.learn.keras.keras_optimization_combo import KOptimizationCombo
 
-
-if mlsys.is_tensorflow_installed:
-  from .keras_optimization_algorithm import KOptimizationAlgorithm
-
-class LearningAlgorithm(object):
+class KLearningAlgorithm(object):
   # -----------------------------------------------------------------------------------
   def __init__(self, config, is_verbose=True):
     self.config = config
@@ -17,7 +13,7 @@ class LearningAlgorithm(object):
   def optimizer(self):
     oResult = None
     if self._implementation is not None:
-      if isinstance(self._implementation,
+      if isinstance(self._implementation, KOptimizationCombo):
         oResult = self._implementation.optimizer
     return oResult
   # -----------------------------------------------------------------------------------
@@ -25,12 +21,12 @@ class LearningAlgorithm(object):
   def callbacks(self):
     oResult = None
     if self._implementation is not None:
-      if isinstance(self._implementation,
+      if isinstance(self._implementation, KOptimizationCombo):
         oResult = self._implementation.callbacks
     return oResult
   # -----------------------------------------------------------------------------------
   def prepare(self):
     if mlsys.is_tensorflow_installed:
-      self._implementation =
+      self._implementation = KOptimizationCombo(self.config, self.is_verbose)
     return self
   # -----------------------------------------------------------------------------------
radnn/learn/{keras_learning_rate_scheduler.py → keras/keras_learning_rate_scheduler.py}
@@ -1,4 +1,7 @@
-
+from radnn.core import RequiredLibs
+oReqs = RequiredLibs()
+if oReqs.is_tensorflow_installed:
+  import tensorflow.keras as ker
 
 class KLearningRateScheduler(ker.callbacks.LearningRateScheduler):
   # -----------------------------------------------------------------------------------
radnn/learn/{keras_optimization_algorithm.py → keras/keras_optimization_combo.py}
@@ -1,7 +1,11 @@
-
-
+from radnn.core import RequiredLibs
+oReqs = RequiredLibs()
+if oReqs.is_tensorflow_installed:
+  import tensorflow.keras as ker
 
-
+from radnn.learn.keras.keras_learning_rate_scheduler import KLearningRateScheduler
+
+class KOptimizationCombo(object):
   # -----------------------------------------------------------------------------------
   def __init__(self, config, is_verbose=True):
     self.config = config