radnn 0.0.7.3__tar.gz → 0.0.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {radnn-0.0.7.3 → radnn-0.0.9}/PKG-INFO +4 -25
- {radnn-0.0.7.3 → radnn-0.0.9}/pyproject.toml +3 -4
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/__init__.py +3 -1
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/__init__.py +2 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/data_feed.py +5 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/dataset_base.py +17 -5
- radnn-0.0.9/src/radnn/data/dataset_folder.py +55 -0
- radnn-0.0.9/src/radnn/data/image_dataset_files.py +175 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/subset_type.py +8 -2
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/tf_classification_data_feed.py +22 -6
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/experiment/ml_experiment_config.py +54 -29
- radnn-0.0.9/src/radnn/images/__init__.py +2 -0
- radnn-0.0.9/src/radnn/images/colors.py +28 -0
- radnn-0.0.9/src/radnn/images/image_processor.py +513 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/ml_system.py +1 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/plots/plot_auto_multi_image.py +6 -5
- radnn-0.0.9/src/radnn/stats/__init__.py +1 -0
- radnn-0.0.9/src/radnn/stats/descriptive_stats.py +45 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/files/__init__.py +1 -0
- radnn-0.0.9/src/radnn/system/files/filelist.py +40 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/files/jsonfile.py +3 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/files/textfile.py +29 -6
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/filestore.py +26 -10
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/filesystem.py +1 -1
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/hosts/windows_host.py +10 -0
- radnn-0.0.9/src/radnn/system/threads/__init__.py +5 -0
- radnn-0.0.9/src/radnn/system/threads/semaphore_lock.py +58 -0
- radnn-0.0.9/src/radnn/system/threads/thread_context.py +175 -0
- radnn-0.0.9/src/radnn/system/threads/thread_safe_queue.py +163 -0
- radnn-0.0.9/src/radnn/system/threads/thread_safe_string_collection.py +66 -0
- radnn-0.0.9/src/radnn/system/threads/thread_worker.py +68 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/utils.py +43 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn.egg-info/PKG-INFO +4 -25
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn.egg-info/SOURCES.txt +19 -5
- radnn-0.0.9/test/test_corpus.py +91 -0
- radnn-0.0.9/test/test_corpus_load.py +209 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_data_feed.py +4 -2
- radnn-0.0.9/test/test_image_dataset_from_files.py +166 -0
- radnn-0.0.9/test/test_ml_experiment_config.py +25 -0
- radnn-0.0.9/test/test_text_pipeline.py +17 -0
- radnn-0.0.7.3/test/test_filestore.py +0 -4
- radnn-0.0.7.3/test/test_filesystem.py +0 -15
- radnn-0.0.7.3/test/test_hosts.py +0 -2
- radnn-0.0.7.3/test/test_json.py +0 -9
- radnn-0.0.7.3/test/test_sort.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/LICENSE.txt +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/README.md +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/setup.cfg +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/core.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/image_dataset.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/preprocess/__init__.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/preprocess/normalizer.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/preprocess/standardizer.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/sample_set.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/data/sequence_dataset.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/errors.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/evaluation/__init__.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/evaluation/evaluate_classification.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/experiment/__init__.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/experiment/ml_experiment.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/experiment/ml_experiment_env.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/experiment/ml_experiment_store.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/learn/__init__.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/learn/keras_learning_rate_scheduler.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/learn/keras_optimization_algorithm.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/learn/learning_algorithm.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/learn/state/__init__.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/learn/state/keras_best_state_saver.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/plots/__init__.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/plots/plot_confusion_matrix.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/plots/plot_learning_curve.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/plots/plot_multi_scatter.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/plots/plot_roc.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/plots/plot_voronoi_2d.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/__init__.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/files/csvfile.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/files/fileobject.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/files/imgfile.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/files/picklefile.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/hosts/__init__.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/hosts/colab_host.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/hosts/linux_host.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn/system/tee_logger.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn.egg-info/dependency_links.txt +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn.egg-info/requires.txt +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/src/radnn.egg-info/top_level.txt +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_config.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_dataset_base.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_dataset_from_pandas.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_experiment_env.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_ml_system.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_mlsystem.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_mnist.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_normalizer.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_normalizer_div_zero.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_sample_set.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_standardizer.py +0 -0
- {radnn-0.0.7.3 → radnn-0.0.9}/test/test_train.py +0 -0
|
@@ -1,35 +1,13 @@
|
|
|
1
|
-
Metadata-Version: 2.
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
2
|
Name: radnn
|
|
3
|
-
Version: 0.0.
|
|
3
|
+
Version: 0.0.9
|
|
4
4
|
Summary: Rapid Deep Neural Networks
|
|
5
5
|
Author-email: "Pantelis I. Kaplanoglou" <pikaplanoglou@ihu.gr>
|
|
6
|
-
License: MIT
|
|
7
|
-
|
|
8
|
-
Copyright (c) 2017-2025 Pantelis I. Kaplanoglou
|
|
9
|
-
|
|
10
|
-
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
11
|
-
of this software and associated documentation files (the "Software"), to deal
|
|
12
|
-
in the Software without restriction, including without limitation the rights
|
|
13
|
-
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
14
|
-
copies of the Software, and to permit persons to whom the Software is
|
|
15
|
-
furnished to do so, subject to the following conditions:
|
|
16
|
-
|
|
17
|
-
The above copyright notice and this permission notice shall be included in all
|
|
18
|
-
copies or substantial portions of the Software.
|
|
19
|
-
|
|
20
|
-
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
21
|
-
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
22
|
-
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
23
|
-
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
24
|
-
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
25
|
-
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
26
|
-
SOFTWARE.
|
|
27
|
-
|
|
6
|
+
License-Expression: MIT
|
|
28
7
|
Project-URL: Homepage, https://github.com/pikaplan/radnn
|
|
29
8
|
Project-URL: Documentation, https://radnn.readthedocs.io/
|
|
30
9
|
Classifier: Intended Audience :: Science/Research
|
|
31
10
|
Classifier: Intended Audience :: Developers
|
|
32
|
-
Classifier: License :: OSI Approved :: MIT License
|
|
33
11
|
Classifier: Programming Language :: Python
|
|
34
12
|
Classifier: Topic :: Software Development
|
|
35
13
|
Classifier: Topic :: Scientific/Engineering
|
|
@@ -47,6 +25,7 @@ Requires-Dist: numpy>=1.26.4
|
|
|
47
25
|
Requires-Dist: matplotlib>=3.8.4
|
|
48
26
|
Requires-Dist: pandas>=2.2.1
|
|
49
27
|
Requires-Dist: scikit-learn>=1.4.2
|
|
28
|
+
Dynamic: license-file
|
|
50
29
|
|
|
51
30
|
# radnn - Rapid Deep Neural Networks
|
|
52
31
|
|
|
@@ -1,18 +1,17 @@
|
|
|
1
1
|
[project]
|
|
2
2
|
name = "radnn"
|
|
3
|
-
version = "0.0.
|
|
3
|
+
version = "0.0.9"
|
|
4
4
|
description = "Rapid Deep Neural Networks"
|
|
5
5
|
readme = "README.md"
|
|
6
6
|
authors = [
|
|
7
7
|
{name = "Pantelis I. Kaplanoglou", email = "pikaplanoglou@ihu.gr"}
|
|
8
8
|
]
|
|
9
|
-
|
|
10
9
|
requires-python = ">=3.7"
|
|
11
|
-
license =
|
|
10
|
+
license = "MIT"
|
|
11
|
+
license-files = ["LICENSE.txt"]
|
|
12
12
|
classifiers=[
|
|
13
13
|
"Intended Audience :: Science/Research",
|
|
14
14
|
"Intended Audience :: Developers",
|
|
15
|
-
"License :: OSI Approved :: MIT License",
|
|
16
15
|
"Programming Language :: Python",
|
|
17
16
|
"Topic :: Software Development",
|
|
18
17
|
"Topic :: Scientific/Engineering",
|
|
@@ -3,7 +3,9 @@
|
|
|
3
3
|
# Version 0.0.6 [2025-02-04]
|
|
4
4
|
# Version 0.0.7.2 [2025-02-17]
|
|
5
5
|
# Version 0.0.7.3 [2025-02-21]
|
|
6
|
-
|
|
6
|
+
# Version 0.0.8 [2025-02-25]
|
|
7
|
+
# Version 0.0.9 [2025-03-xx]
|
|
8
|
+
__version__ = "0.0.9"
|
|
7
9
|
|
|
8
10
|
from .system import FileStore, FileSystem
|
|
9
11
|
from .ml_system import MLSystem
|
|
@@ -57,10 +57,15 @@ class DataFeed(object):
|
|
|
57
57
|
self.batch_size = None
|
|
58
58
|
|
|
59
59
|
self._has_mapped_preprocessing_method = False
|
|
60
|
+
self._is_multiclass = False
|
|
60
61
|
|
|
61
62
|
self.feed = self.build_iterator()
|
|
62
63
|
self.pipeline_objects.append(self.feed)
|
|
63
64
|
# --------------------------------------------------------------------------------------------------------------------
|
|
65
|
+
def multiclass(self):
|
|
66
|
+
self._is_multiclass = True
|
|
67
|
+
return self
|
|
68
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
64
69
|
def normalize(self):
|
|
65
70
|
self.value_preprocessor = Normalizer(self.dataset.name, self.dataset.filestore)
|
|
66
71
|
if self.value_preprocessor.min is None:
|
|
@@ -236,7 +236,7 @@ class DataSetBase(object):
|
|
|
236
236
|
def has_cache(self, samples_file_prefix="Samples"):
|
|
237
237
|
return self.filestore.exists("%s.pkl" % samples_file_prefix) or self.filestore.exists("%s.TS.pkl" % samples_file_prefix)
|
|
238
238
|
# --------------------------------------------------------------------------------------------------------------------
|
|
239
|
-
def load_cache(self, filestore: FileStore = None, samples_file_prefix="Samples", targets_file_prefix="Labels", is_verbose=False):
|
|
239
|
+
def load_cache(self, filestore: FileStore = None, samples_file_prefix="Samples", targets_file_prefix="Labels", ids_file_prefix="Ids", is_verbose=False):
|
|
240
240
|
if filestore is None:
|
|
241
241
|
filestore = self.filestore
|
|
242
242
|
if filestore is None:
|
|
@@ -258,30 +258,39 @@ class DataSetBase(object):
|
|
|
258
258
|
|
|
259
259
|
self.samples = filestore.obj.load("%s.pkl" % samples_file_prefix)
|
|
260
260
|
self.labels = filestore.obj.load("%s.pkl" % targets_file_prefix)
|
|
261
|
-
|
|
261
|
+
|
|
262
262
|
if is_verbose:
|
|
263
263
|
print("Loading training set ...")
|
|
264
264
|
nTSSamples = filestore.obj.load("%s.TS.pkl" % samples_file_prefix)
|
|
265
265
|
nTSTargets = filestore.obj.load("%s.TS.pkl" % targets_file_prefix)
|
|
266
266
|
self.assign_training_set(nTSSamples, nTSTargets)
|
|
267
|
-
|
|
267
|
+
nTSIDs = filestore.obj.load("%s.TS.pkl" % ids_file_prefix)
|
|
268
|
+
if nTSIDs is not None:
|
|
269
|
+
self.ts_sample_ids = nTSIDs
|
|
270
|
+
|
|
268
271
|
if is_verbose:
|
|
269
272
|
print("Loading validation set ...")
|
|
270
273
|
nVSSamples = filestore.obj.load("%s.VS.pkl" % samples_file_prefix)
|
|
271
274
|
nVSTargets = filestore.obj.load("%s.VS.pkl" % targets_file_prefix)
|
|
272
275
|
self.assign_validation_set(nVSSamples, nVSTargets)
|
|
273
|
-
|
|
276
|
+
nVSIds = filestore.obj.load("%s.VS.pkl" % ids_file_prefix)
|
|
277
|
+
if nVSIds is not None:
|
|
278
|
+
self.vs_sample_ids = nVSIds
|
|
279
|
+
|
|
274
280
|
if is_verbose:
|
|
275
281
|
print("Loading unknown test data set ...")
|
|
276
282
|
nUTSamples = filestore.obj.load("%s.UT.pkl" % samples_file_prefix)
|
|
277
283
|
if nUTSamples is not None:
|
|
278
284
|
nUTTargets = filestore.obj.load("%s.UT.pkl" % targets_file_prefix)
|
|
279
285
|
self.assign_unknown_test_set(nUTSamples, nUTTargets)
|
|
286
|
+
nUTIds = filestore.obj.load("%s.UT.pkl" % ids_file_prefix)
|
|
287
|
+
if nUTIds is not None:
|
|
288
|
+
self.ut_sample_ids = nUTIds
|
|
280
289
|
|
|
281
290
|
|
|
282
291
|
return bResult
|
|
283
292
|
# --------------------------------------------------------------------------------------------------------------------
|
|
284
|
-
def save_cache(self, filestore: FileStore = None, samples_file_prefix="Samples", targets_file_prefix="Labels"):
|
|
293
|
+
def save_cache(self, filestore: FileStore = None, samples_file_prefix="Samples", targets_file_prefix="Labels", ids_file_prefix="Ids"):
|
|
285
294
|
if filestore is None:
|
|
286
295
|
filestore = self.filestore
|
|
287
296
|
if filestore is None:
|
|
@@ -293,13 +302,16 @@ class DataSetBase(object):
|
|
|
293
302
|
|
|
294
303
|
filestore.obj.save(self.ts_samples, "%s.TS.pkl" % samples_file_prefix, is_overwriting=True)
|
|
295
304
|
filestore.obj.save(self.ts_labels, "%s.TS.pkl" % targets_file_prefix, is_overwriting=True)
|
|
305
|
+
filestore.obj.save(self.ts_sample_ids, "%s.TS.pkl" % ids_file_prefix, is_overwriting=True)
|
|
296
306
|
|
|
297
307
|
filestore.obj.save(self.vs_samples, "%s.VS.pkl" % samples_file_prefix, is_overwriting=True)
|
|
298
308
|
filestore.obj.save(self.vs_labels, "%s.VS.pkl" % targets_file_prefix, is_overwriting=True)
|
|
309
|
+
filestore.obj.save(self.vs_sample_ids, "%s.VS.pkl" % ids_file_prefix, is_overwriting=True)
|
|
299
310
|
|
|
300
311
|
if self.ut_samples is not None:
|
|
301
312
|
filestore.obj.save(self.ut_samples, "%s.UT.pkl" % samples_file_prefix, is_overwriting=True)
|
|
302
313
|
filestore.obj.save(self.ut_labels, "%s.UT.pkl" % targets_file_prefix, is_overwriting=True)
|
|
314
|
+
filestore.obj.save(self.ut_sample_ids, "%s.UT.pkl" % ids_file_prefix, is_overwriting=True)
|
|
303
315
|
|
|
304
316
|
self.card["name"] = self.name
|
|
305
317
|
if self.feature_count is not None:
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
from .subset_type import SubsetType
|
|
2
|
+
|
|
3
|
+
class DataSetFolder(object):
|
|
4
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
5
|
+
def __init__(self, folder_name, filestore):
|
|
6
|
+
self.folder_name = folder_name
|
|
7
|
+
self.filestore = filestore
|
|
8
|
+
self.filestore_ts = None
|
|
9
|
+
self.filestore_vs = None
|
|
10
|
+
self.filestore_ut = None
|
|
11
|
+
self.split_filestores = []
|
|
12
|
+
|
|
13
|
+
self.subfolders = self.filestore.list_folders(is_full_path=False)
|
|
14
|
+
self.is_split, sTSFolder, sVSFolder, sUTFolder = self.get_split_subfolders(self.subfolders)
|
|
15
|
+
if self.is_split:
|
|
16
|
+
if sTSFolder is not None:
|
|
17
|
+
self.filestore_ts = self.filestore.subfs(sTSFolder, must_exist=True)
|
|
18
|
+
self.split_filestores.append(self.filestore_ts)
|
|
19
|
+
if sVSFolder is not None:
|
|
20
|
+
self.filestore_vs = self.filestore.subfs(sVSFolder, must_exist=True)
|
|
21
|
+
self.split_filestores.append(self.filestore_vs)
|
|
22
|
+
if sUTFolder is not None:
|
|
23
|
+
self.filestore_ut = self.filestore.subfs(sUTFolder, must_exist=True)
|
|
24
|
+
self.split_filestores.append(self.filestore_ut)
|
|
25
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
26
|
+
def get_split_subfolders(self, folders):
|
|
27
|
+
sTSFolder = None
|
|
28
|
+
sVSFolder = None
|
|
29
|
+
sUTFolder = None
|
|
30
|
+
bIsSplit = False
|
|
31
|
+
for sFolder in folders:
|
|
32
|
+
oFolderSubsetType = SubsetType(sFolder)
|
|
33
|
+
if oFolderSubsetType.is_training_set:
|
|
34
|
+
sTSFolder = sFolder
|
|
35
|
+
bIsSplit = True
|
|
36
|
+
elif oFolderSubsetType.is_validation_set:
|
|
37
|
+
sVSFolder = sFolder
|
|
38
|
+
bIsSplit = True
|
|
39
|
+
elif oFolderSubsetType.is_unknown_test_set:
|
|
40
|
+
sUTFolder = sFolder
|
|
41
|
+
bIsSplit = True
|
|
42
|
+
|
|
43
|
+
# When confusing terminology is uses and 'test' means 'validation'
|
|
44
|
+
if (sUTFolder is not None) and (sVSFolder is None):
|
|
45
|
+
sVSFolder = sUTFolder
|
|
46
|
+
sUTFolder = None
|
|
47
|
+
|
|
48
|
+
return bIsSplit, sTSFolder, sVSFolder, sUTFolder
|
|
49
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
50
|
+
def __str__(self):
|
|
51
|
+
return "./" + self.folder_name
|
|
52
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
53
|
+
def __repr__(self):
|
|
54
|
+
return self.__str__()
|
|
55
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
from radnn import FileStore
|
|
2
|
+
from radnn.utils import camel_case
|
|
3
|
+
from radnn.system.files import FileList
|
|
4
|
+
from .dataset_folder import DataSetFolder
|
|
5
|
+
import sys
|
|
6
|
+
from tqdm import tqdm
|
|
7
|
+
from datetime import datetime
|
|
8
|
+
|
|
9
|
+
class ImageDataSetFiles(object):
|
|
10
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
11
|
+
def __init__(self, images_fs, name="files", is_progress_indicator=True):
|
|
12
|
+
self.images_fs :FileStore = images_fs
|
|
13
|
+
self.name = name
|
|
14
|
+
self.is_progress_indicator = is_progress_indicator
|
|
15
|
+
self.class_names :dict = dict()
|
|
16
|
+
self.class_folders :list = []
|
|
17
|
+
self.files :FileList = dict()
|
|
18
|
+
self.files_ts :FileList = dict()
|
|
19
|
+
self.files_vs :FileList = dict()
|
|
20
|
+
self.files_ut :FileList = dict()
|
|
21
|
+
self.total_file_count = 0
|
|
22
|
+
self.is_split_on_main_folder = False
|
|
23
|
+
self.is_split_in_class_folders = False
|
|
24
|
+
self.run_date_time = None
|
|
25
|
+
|
|
26
|
+
self.detect_class_names_from_folders()
|
|
27
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
28
|
+
def load(self, extensions="*.jpg; *.png"):
|
|
29
|
+
if not self.load_file_lists():
|
|
30
|
+
self.detect_files(extensions)
|
|
31
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
32
|
+
def load_file_lists(self):
|
|
33
|
+
bResult = False
|
|
34
|
+
oDict = self.images_fs.obj.load(f"{self.name}-file-info.pkl")
|
|
35
|
+
if oDict is not None:
|
|
36
|
+
self.run_date_time = oDict["RunDateTime"]
|
|
37
|
+
self.class_names = oDict["ClassNames"]
|
|
38
|
+
self.class_folders = oDict["ClassFolders"]
|
|
39
|
+
self.files = oDict["ClassFiles.All"]
|
|
40
|
+
self.files_ts = oDict["ClassFiles.TrainingSet"]
|
|
41
|
+
self.files_vs = oDict["ClassFiles.ValidationSet"]
|
|
42
|
+
self.files_ut = oDict["ClassFiles.UnknownTestSet"]
|
|
43
|
+
self.total_file_count = oDict["TotalFileCount"]
|
|
44
|
+
self.is_split_on_main_folder = oDict["IsSplitOnMainFolder"]
|
|
45
|
+
self.is_split_in_class_folders = oDict["IsSplitInClassFolders"]
|
|
46
|
+
bResult = True
|
|
47
|
+
|
|
48
|
+
return bResult
|
|
49
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
50
|
+
def save_file_lists(self):
|
|
51
|
+
oDict = dict()
|
|
52
|
+
oDict["RunDateTime"] = self.run_date_time
|
|
53
|
+
oDict["ClassNames"] = self.class_names
|
|
54
|
+
oDict["ClassFolders"] = self.class_folders
|
|
55
|
+
oDict["ClassFiles.All"] = self.files
|
|
56
|
+
oDict["ClassFiles.TrainingSet"] = self.files_ts
|
|
57
|
+
oDict["ClassFiles.ValidationSet"] = self.files_vs
|
|
58
|
+
oDict["ClassFiles.UnknownTestSet"] = self.files_ut
|
|
59
|
+
oDict["TotalFileCount"] = self.total_file_count
|
|
60
|
+
oDict["IsSplitOnMainFolder"] = self.is_split_on_main_folder
|
|
61
|
+
oDict["IsSplitInClassFolders"] = self.is_split_in_class_folders
|
|
62
|
+
self.images_fs.obj.save(oDict, f"{self.name}-file-info.pkl")
|
|
63
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
64
|
+
def detect_class_names_from_folders(self):
|
|
65
|
+
oClassNamesFS = self.images_fs
|
|
66
|
+
oMainFolder = DataSetFolder("/", self.images_fs)
|
|
67
|
+
oFolders = oMainFolder.subfolders
|
|
68
|
+
|
|
69
|
+
self.is_split_on_main_folder = oMainFolder.is_split
|
|
70
|
+
if self.is_split_on_main_folder:
|
|
71
|
+
# Detect the class names under the training set subfolder
|
|
72
|
+
oClassNamesFS = oMainFolder.filestore_ts
|
|
73
|
+
oFolders = oClassNamesFS.list_folders(is_full_path=False)
|
|
74
|
+
|
|
75
|
+
for nIndex, sFolder in enumerate(oFolders):
|
|
76
|
+
sClassName = camel_case(sFolder)
|
|
77
|
+
self.class_names[nIndex] = sClassName
|
|
78
|
+
oClassFS = oClassNamesFS.subfs(sFolder, must_exist=True)
|
|
79
|
+
oClassFolder = DataSetFolder(sFolder, oClassFS)
|
|
80
|
+
if not self.is_split_on_main_folder:
|
|
81
|
+
if oClassFolder.is_split:
|
|
82
|
+
self.is_split_in_class_folders = True
|
|
83
|
+
self.class_folders.append(oClassFolder)
|
|
84
|
+
|
|
85
|
+
return self.class_folders
|
|
86
|
+
|
|
87
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
88
|
+
def traverse_sub_folders(self, extensions, progress):
|
|
89
|
+
for nClassIndex, oClassFolder in enumerate(self.class_folders):
|
|
90
|
+
if progress is not None:
|
|
91
|
+
progress.set_description(f"Finding files for class {self.class_names[nClassIndex]}")
|
|
92
|
+
progress.refresh()
|
|
93
|
+
self.files[nClassIndex] = oClassFolder.filestore.filelist(extensions)
|
|
94
|
+
self.total_file_count += len(self.files[nClassIndex])
|
|
95
|
+
if progress is not None:
|
|
96
|
+
progress.update(1)
|
|
97
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
98
|
+
def traverse_sub_folders_with_split(self, extensions, progress):
|
|
99
|
+
self.total_file_count = 0
|
|
100
|
+
for nClassIndex, oClassFolder in enumerate(self.class_folders):
|
|
101
|
+
if progress is not None:
|
|
102
|
+
progress.set_description(f"Finding files for class {self.class_names[nClassIndex]}")
|
|
103
|
+
progress.refresh()
|
|
104
|
+
if oClassFolder.is_split:
|
|
105
|
+
oClassAllFiles = FileList()
|
|
106
|
+
for nIndex, oSplitFileStore in enumerate(oClassFolder.split_filestores):
|
|
107
|
+
if oSplitFileStore is not None:
|
|
108
|
+
oFileList = oSplitFileStore.filelist(extensions)
|
|
109
|
+
for oFile in oFileList.full_paths:
|
|
110
|
+
dSplit = None
|
|
111
|
+
if oSplitFileStore == oClassFolder.filestore_ts:
|
|
112
|
+
dSplit = self.files_ts
|
|
113
|
+
elif oSplitFileStore == oClassFolder.filestore_vs:
|
|
114
|
+
dSplit = self.files_vs
|
|
115
|
+
elif oSplitFileStore == oClassFolder.filestore_ut:
|
|
116
|
+
dSplit = self.files_ut
|
|
117
|
+
|
|
118
|
+
if dSplit is not None:
|
|
119
|
+
if nClassIndex not in dSplit:
|
|
120
|
+
dSplit[nClassIndex] = []
|
|
121
|
+
dSplit[nClassIndex].append(oFile)
|
|
122
|
+
|
|
123
|
+
oClassAllFiles.append(oFile)
|
|
124
|
+
else:
|
|
125
|
+
raise Exception(f"No split subfolders for class {nIndex} '{self.class_names[nIndex]}',\n"
|
|
126
|
+
+ f"that is stored in {oClassFolder.filestore}\n"
|
|
127
|
+
+ f"All of the classes should have the same split.")
|
|
128
|
+
self.files[nClassIndex] = oClassAllFiles
|
|
129
|
+
self.total_file_count += len(self.files[nClassIndex])
|
|
130
|
+
if progress is not None:
|
|
131
|
+
progress.update(1)
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
if progress is not None:
|
|
135
|
+
progress.set_description("Finished")
|
|
136
|
+
progress.refresh()
|
|
137
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
138
|
+
def detect_files(self, extensions="*.jpg; *.png"):
|
|
139
|
+
oProgress = None
|
|
140
|
+
if len(self.class_folders) > 0:
|
|
141
|
+
if (not self.is_split_on_main_folder) and (not self.is_split_in_class_folders):
|
|
142
|
+
if self.is_progress_indicator:
|
|
143
|
+
oProgress = tqdm(total=len(self.class_folders), ncols=80)
|
|
144
|
+
try:
|
|
145
|
+
self.traverse_sub_folders(extensions, oProgress)
|
|
146
|
+
finally:
|
|
147
|
+
if self.is_progress_indicator:
|
|
148
|
+
oProgress.close()
|
|
149
|
+
|
|
150
|
+
elif (not self.is_split_on_main_folder) and self.is_split_in_class_folders:
|
|
151
|
+
if self.is_progress_indicator:
|
|
152
|
+
oProgress = tqdm(total=len(self.class_folders), ncols=80)
|
|
153
|
+
try:
|
|
154
|
+
self.traverse_sub_folders_with_split(extensions, oProgress)
|
|
155
|
+
finally:
|
|
156
|
+
if self.is_progress_indicator:
|
|
157
|
+
oProgress.close()
|
|
158
|
+
|
|
159
|
+
self.save_file_lists()
|
|
160
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
|
|
@@ -24,8 +24,14 @@
|
|
|
24
24
|
# ......................................................................................
|
|
25
25
|
class SubsetType(object):
|
|
26
26
|
def __init__(self, name):
|
|
27
|
-
self.name = name
|
|
28
|
-
|
|
27
|
+
self.name = name.lower()
|
|
28
|
+
self.type = -1 # Unknown
|
|
29
|
+
if self.is_training_set:
|
|
30
|
+
self.type = 0
|
|
31
|
+
elif self.is_validation_set:
|
|
32
|
+
self.type = 1
|
|
33
|
+
elif self.is_unknown_test_set:
|
|
34
|
+
self.type = 2
|
|
29
35
|
@property
|
|
30
36
|
def is_training_set(self):
|
|
31
37
|
return (self.name == "training") or (self.name == "train") or (self.name == "ts")
|
|
@@ -66,23 +66,39 @@ class TFClassificationDataFeed(DataFeed):
|
|
|
66
66
|
feed = tf.data.Dataset.from_tensor_slices((self.dataset.ut_samples, self.dataset.ut_labels))
|
|
67
67
|
return feed
|
|
68
68
|
# --------------------------------------------------------------------------------------------------------------------
|
|
69
|
-
def
|
|
69
|
+
def preprocess_normalize_onehot(self, samples, labels):
|
|
70
70
|
tSamples = tf.cast(samples, tf.float32)
|
|
71
71
|
tSamples = (tSamples - self.value_preprocessor.min) / (self.value_preprocessor.max - self.value_preprocessor.min)
|
|
72
72
|
tTargetsOneHot = tf.one_hot(labels, self.dataset.class_count)
|
|
73
73
|
return tSamples, tTargetsOneHot
|
|
74
74
|
# --------------------------------------------------------------------------------------------------------------------
|
|
75
|
-
def
|
|
75
|
+
def preprocess_standardize_onehot(self, samples, labels):
|
|
76
76
|
tSamples = tf.cast(samples, tf.float32)
|
|
77
77
|
tSamples = (tSamples - self.value_preprocessor.mean) / self.value_preprocessor.std
|
|
78
78
|
tTargetsOneHot = tf.one_hot(labels, self.dataset.class_count)
|
|
79
79
|
return tSamples, tTargetsOneHot
|
|
80
80
|
# --------------------------------------------------------------------------------------------------------------------
|
|
81
|
+
def preprocess_normalize(self, samples, labels):
|
|
82
|
+
tSamples = tf.cast(samples, tf.float32)
|
|
83
|
+
tSamples = (tSamples - self.value_preprocessor.min) / (self.value_preprocessor.max - self.value_preprocessor.min)
|
|
84
|
+
return tSamples, labels
|
|
85
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
86
|
+
def preprocess_standardize(self, samples, labels):
|
|
87
|
+
tSamples = tf.cast(samples, tf.float32)
|
|
88
|
+
tSamples = (tSamples - self.value_preprocessor.mean) / self.value_preprocessor.std
|
|
89
|
+
return tSamples, labels
|
|
90
|
+
# --------------------------------------------------------------------------------------------------------------------
|
|
81
91
|
def build_preprocessor(self, feed):
|
|
82
|
-
if
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
92
|
+
if self._is_multiclass:
|
|
93
|
+
if isinstance(self.value_preprocessor, Standardizer):
|
|
94
|
+
feed = feed.map(self.preprocess_standardize_onehot, num_parallel_calls=8)
|
|
95
|
+
elif isinstance(self.value_preprocessor, Normalizer):
|
|
96
|
+
feed = feed.map(self.preprocess_normalize_onehot, num_parallel_calls=8)
|
|
97
|
+
else:
|
|
98
|
+
if isinstance(self.value_preprocessor, Standardizer):
|
|
99
|
+
feed = feed.map(self.preprocess_standardize, num_parallel_calls=8)
|
|
100
|
+
elif isinstance(self.value_preprocessor, Normalizer):
|
|
101
|
+
feed = feed.map(self.preprocess_normalize, num_parallel_calls=8)
|
|
86
102
|
return feed
|
|
87
103
|
# --------------------------------------------------------------------------------------------------------------------
|
|
88
104
|
def build_random_shuffler(self, feed):
|
|
@@ -31,8 +31,8 @@ import os
|
|
|
31
31
|
import json
|
|
32
32
|
import re
|
|
33
33
|
from datetime import datetime
|
|
34
|
-
|
|
35
|
-
from radnn.system import FileSystem
|
|
34
|
+
from radnn.ml_system import mlsys
|
|
35
|
+
from radnn.system import FileSystem, FileStore
|
|
36
36
|
|
|
37
37
|
# --------------------------------------------------------------------------------------
|
|
38
38
|
def model_code_mllib(p_oDict):
|
|
@@ -146,9 +146,11 @@ def experiment_code_and_timestamp(filename):
|
|
|
146
146
|
|
|
147
147
|
|
|
148
148
|
# =========================================================================================================================
|
|
149
|
-
class MLExperimentConfig(
|
|
149
|
+
class MLExperimentConfig(object):
|
|
150
150
|
# --------------------------------------------------------------------------------------
|
|
151
151
|
def __init__(self, filename=None, base_name=None, number=None, variation=None, fold_number=None, hyperparams=None):
|
|
152
|
+
self._kv = dict()
|
|
153
|
+
|
|
152
154
|
self["Experiment.BaseName"] = base_name
|
|
153
155
|
self.filename = filename
|
|
154
156
|
if self.filename is not None:
|
|
@@ -164,28 +166,19 @@ class MLExperimentConfig(dict):
|
|
|
164
166
|
if hyperparams is not None:
|
|
165
167
|
self.assign(hyperparams)
|
|
166
168
|
# --------------------------------------------------------------------------------------
|
|
169
|
+
def __getitem__(self, key):
|
|
170
|
+
return self._kv[key]
|
|
171
|
+
# --------------------------------------------------------------------------------------
|
|
172
|
+
def __setitem__(self, key, value):
  # Dict-style write access into the internal key/value store.
  self._kv[key] = value
|
|
174
|
+
# --------------------------------------------------------------------------------------
|
|
175
|
+
def __contains__(self, key):
  # Supports `key in config` membership tests against the internal store.
  return key in self._kv
|
|
177
|
+
# --------------------------------------------------------------------------------------
|
|
167
178
|
@property
def experiment_code(self):
  # Canonical identifier for this experiment configuration.
  # Delegates to the module-level get_experiment_code helper.
  return get_experiment_code(self)
|
|
170
181
|
# --------------------------------------------------------------------------------------
|
|
171
|
-
def load(self, filename=None, must_exist=False):
|
|
172
|
-
if filename is None:
|
|
173
|
-
filename = self.filename
|
|
174
|
-
|
|
175
|
-
# reading the data from the file
|
|
176
|
-
if os.path.exists(filename):
|
|
177
|
-
with open(filename) as oFile:
|
|
178
|
-
sConfig = oFile.read()
|
|
179
|
-
self.setDefaults()
|
|
180
|
-
dConfigDict = json.loads(sConfig)
|
|
181
|
-
|
|
182
|
-
for sKey in dConfigDict.keys():
|
|
183
|
-
self[sKey] = dConfigDict[sKey]
|
|
184
|
-
else:
|
|
185
|
-
if must_exist:
|
|
186
|
-
raise Exception("Experiment configuration file %s is not found." % filename)
|
|
187
|
-
return self
|
|
188
|
-
# --------------------------------------------------------------------------------------
|
|
189
182
|
def assign(self, config_dict):
  """Copy every key/value pair of config_dict into this configuration.

  Values for keys that already exist are overwritten; assignment goes
  through __setitem__ so the internal store stays consistent.
  """
  for key, value in config_dict.items():
    self[key] = value
|
|
@@ -200,16 +193,19 @@ class MLExperimentConfig(dict):
|
|
|
200
193
|
if filename is not None:
|
|
201
194
|
self.filename = filename
|
|
202
195
|
|
|
203
|
-
sJSON = json.dumps(self, sort_keys=False, indent=4)
|
|
196
|
+
sJSON = json.dumps(self._kv, sort_keys=False, indent=4)
|
|
204
197
|
with open(self.filename, "w") as oFile:
|
|
205
198
|
oFile.write(sJSON)
|
|
206
199
|
oFile.close()
|
|
207
|
-
|
|
208
200
|
return self
|
|
209
201
|
# --------------------------------------------------------------------------------------
|
|
210
|
-
def save(self, fs, filename_only=None):
|
|
211
|
-
if
|
|
202
|
+
def save(self, fs=None, filename_only=None):
|
|
203
|
+
if fs is None:
|
|
204
|
+
fs = mlsys.filesys.configs
|
|
205
|
+
elif isinstance(fs, FileSystem):
|
|
212
206
|
fs = fs.configs
|
|
207
|
+
elif not isinstance(fs, FileStore):
|
|
208
|
+
raise Exception("Unsupporting persistent storage")
|
|
213
209
|
|
|
214
210
|
if filename_only is None:
|
|
215
211
|
filename_only = get_experiment_code(self)
|
|
@@ -221,19 +217,48 @@ class MLExperimentConfig(dict):
|
|
|
221
217
|
# Backwards compatibility 0.6.0
|
|
222
218
|
return self.save()
|
|
223
219
|
# --------------------------------------------------------------------------------------
|
|
224
|
-
def
|
|
225
|
-
if
|
|
220
|
+
def load_from_json(self, filename=None, must_exist=False):
  """Populate this configuration from a JSON file on disk.

  :param filename: path of the .json file; defaults to self.filename
  :param must_exist: when True, a missing file raises an exception;
                     otherwise the configuration is left untouched
  :return: self, to allow call chaining
  """
  if filename is None:
    filename = self.filename

  if not os.path.exists(filename):
    if must_exist:
      raise Exception("Experiment configuration file %s is not found." % filename)
    return self

  # Read the serialized configuration from the file.
  with open(filename) as oFile:
    sConfig = oFile.read()

  # Defaults first, then loaded keys override them in the internal store.
  self.setDefaults()
  dConfigDict = json.loads(sConfig)
  for sKey, value in dConfigDict.items():
    self._kv[sKey] = value

  return self
|
|
237
|
+
# --------------------------------------------------------------------------------------
|
|
238
|
+
def load(self, fs=None, filename_only=None):
  """Read this configuration back from a persisted .json file.

  :param fs: None (use the global mlsys config store), a FileSystem
             (its .configs store is used) or a FileStore instance
  :param filename_only: file name without extension; defaults to the
                        experiment code of this configuration
  :return: self, to allow call chaining
  """
  # Resolve the storage argument to a file store.
  if fs is None:
    fs = mlsys.filesys.configs
  elif isinstance(fs, FileSystem):
    fs = fs.configs
  elif not isinstance(fs, FileStore):
    raise Exception("Unsupporting persistent storage")

  # The default file name is the canonical experiment code.
  code = get_experiment_code(self) if filename_only is None else filename_only
  return self.load_from_json(fs.file(code + ".json"))
|
|
251
|
+
# --------------------------------------------------------------------------------------
|
|
252
|
+
def load_config(self, fs, filename_only):
  # Backwards compatibility 0.6.0
  # Deprecated alias kept for callers of the pre-0.7 API; use load() instead.
  return self.load(fs, filename_only)
|
|
230
255
|
# --------------------------------------------------------------------------------------
|
|
231
256
|
def setDefaults(self):
  # Subclass hook invoked by load_from_json() before loaded keys are applied.
  # The base implementation intentionally does nothing.
  pass
|
|
233
258
|
# --------------------------------------------------------------------------------------
|
|
234
259
|
def __str__(self)->str:
|
|
235
260
|
sResult = ""
|
|
236
|
-
for sKey in self.keys():
|
|
261
|
+
for sKey in self._kv.keys():
|
|
237
262
|
sResult += f' {sKey}: \"{self[sKey]}\",\n'
|
|
238
263
|
|
|
239
264
|
sResult = "{\n" + sResult + "}"
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
from matplotlib import colors
|
|
3
|
+
from matplotlib.colors import to_rgba
|
|
4
|
+
|
|
5
|
+
LUMA_W = np.asarray([0.29889531 / 255.0, 0.58662247 / 255.0, 0.11448223 / 255.0], dtype=np.float32)
|
|
6
|
+
|
|
7
|
+
# ------------------------------------------------------------------------------------
|
|
8
|
+
# Analyse the image to H,S,L, B
|
|
9
|
+
def image_rgb_to_hslb(image):
  """Convert an RGB image with values in [0, 255] to a 4-channel HSLB array.

  :param image: RGB image array, channel values in [0, 255]
  :return: float32 array whose last axis holds [hue, saturation, luma, brightness]
  """
  # Hue/saturation/value come from the normalized image; luma is a
  # weighted sum of the raw RGB channels (weights already scaled by 1/255).
  hsv = colors.rgb_to_hsv(image / 255.0)
  luma = np.dot(image, LUMA_W.T)
  channels = [hsv[..., 0], hsv[..., 1], luma, hsv[..., 2]]
  return np.stack(channels, axis=-1).astype(np.float32)
|
|
18
|
+
# ------------------------------------------------------------------------------------
|
|
19
|
+
def image_rgb_to_hif(image):
  """Convert an RGB image with values in [0, 255] to a 3-channel array.

  The result holds the [hue, saturation, luma] channels — exactly the
  first three channels of the HSLB representation.

  :param image: RGB image array, channel values in [0, 255]
  :return: float32 array whose last axis holds [hue, saturation, luma]
  """
  # Reuse the HSLB conversion instead of recomputing HSV and luma: the
  # original code called image_rgb_to_hslb(), discarded the result, and
  # then duplicated the same rgb_to_hsv / luma computations inline.
  return image_rgb_to_hslb(image)[..., :3]
|
|
25
|
+
# ------------------------------------------------------------------------------------
|
|
26
|
+
def color(name):
  """Resolve a color name to an integer (r, g, b, a) tuple with components in 0..255."""
  # to_rgba yields floats in [0, 1]; scale and truncate each component.
  return tuple(int(component * 255) for component in to_rgba(name))
|
|
28
|
+
# ------------------------------------------------------------------------------------
|