radnn 0.1.5__tar.gz → 0.1.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. {radnn-0.1.5/src/radnn.egg-info → radnn-0.1.6}/PKG-INFO +1 -1
  2. {radnn-0.1.5 → radnn-0.1.6}/pyproject.toml +1 -1
  3. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/__init__.py +2 -1
  4. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/custom_data_set.py +1 -1
  5. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/dataset_base.py +77 -16
  6. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/dataset_base_legacy.py +1 -1
  7. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/errors.py +1 -1
  8. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/sample_preprocessor.py +3 -0
  9. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/sample_set_simple.py +4 -1
  10. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/experiment/ml_experiment.py +2 -2
  11. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/experiment/ml_experiment_log.py +25 -19
  12. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/hosts/windows_host.py +1 -1
  13. {radnn-0.1.5 → radnn-0.1.6/src/radnn.egg-info}/PKG-INFO +1 -1
  14. {radnn-0.1.5 → radnn-0.1.6}/LICENSE.txt +0 -0
  15. {radnn-0.1.5 → radnn-0.1.6}/setup.cfg +0 -0
  16. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/benchmark/__init__.py +0 -0
  17. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/benchmark/latency.py +0 -0
  18. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/benchmark/vram.py +0 -0
  19. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/core.py +0 -0
  20. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/__init__.py +0 -0
  21. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/constants.py +0 -0
  22. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/data_hyperparams.py +0 -0
  23. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/dataset_factory.py +0 -0
  24. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/sample_set.py +0 -0
  25. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/sample_set_kind.py +0 -0
  26. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/sequence_dataset.py +0 -0
  27. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/structs/__init__.py +0 -0
  28. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data/structs/tree.py +0 -0
  29. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/__init__.py +0 -0
  30. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/data_feed.py +0 -0
  31. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/dataset_base.py +0 -0
  32. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/dataset_folder.py +0 -0
  33. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/image_dataset.py +0 -0
  34. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/image_dataset_files.py +0 -0
  35. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/preprocess/__init__.py +0 -0
  36. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/preprocess/normalizer.py +0 -0
  37. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/preprocess/standardizer.py +0 -0
  38. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/sample_set.py +0 -0
  39. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/sequence_dataset.py +0 -0
  40. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/structures/__init__.py +0 -0
  41. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/structures/dictionary.py +0 -0
  42. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/subset_type.py +0 -0
  43. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/data_beta/tf_classification_data_feed.py +0 -0
  44. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/errors.py +0 -0
  45. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/evaluation/__init__.py +0 -0
  46. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/evaluation/evaluate_classification.py +0 -0
  47. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/experiment/__init__.py +0 -0
  48. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/experiment/identification.py +0 -0
  49. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/experiment/ml_experiment_config.py +0 -0
  50. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/experiment/ml_experiment_env.py +0 -0
  51. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/experiment/ml_experiment_store.py +0 -0
  52. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/images/__init__.py +0 -0
  53. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/images/colors.py +0 -0
  54. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/images/image_processor.py +0 -0
  55. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/images/transforms.py +0 -0
  56. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/__init__.py +0 -0
  57. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/constants.py +0 -0
  58. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/keras/__init__.py +0 -0
  59. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/keras/keras_best_state_saver.py +0 -0
  60. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/keras/keras_learning_algorithm.py +0 -0
  61. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/keras/keras_learning_rate_scheduler.py +0 -0
  62. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/keras/keras_optimization_combo.py +0 -0
  63. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/torch/__init__.py +0 -0
  64. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/torch/gradient_descent_method.py +0 -0
  65. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/torch/losses/__init__.py +0 -0
  66. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/torch/losses/rmse.py +0 -0
  67. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/torch/lr_schedulers.py +0 -0
  68. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/torch/ml_model_freezer.py +0 -0
  69. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/learn/torch/ml_trainer.py +0 -0
  70. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/ml_system.py +0 -0
  71. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/models/__init__.py +0 -0
  72. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/models/cnn/__init__.py +0 -0
  73. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/models/cnn/cnn_stem_setup.py +0 -0
  74. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/models/model_factory.py +0 -0
  75. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/models/model_hyperparams.py +0 -0
  76. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/models/model_info.py +0 -0
  77. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/models/torch/__init__.py +0 -0
  78. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/models/torch/model_utils.py +0 -0
  79. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/models/torch/torch_model_build_adapter.py +0 -0
  80. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/__init__.py +0 -0
  81. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_auto_multi_image.py +0 -0
  82. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_confusion_matrix.py +0 -0
  83. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_function.py +0 -0
  84. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_histogram_of_classes.py +0 -0
  85. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_learning_curve.py +0 -0
  86. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_legacy.py +0 -0
  87. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_multi_scatter.py +0 -0
  88. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_roc.py +0 -0
  89. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_visualize_dataset2d.py +0 -0
  90. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/plots/plot_voronoi_2d.py +0 -0
  91. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/stats/__init__.py +0 -0
  92. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/stats/descriptive_stats.py +0 -0
  93. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/__init__.py +0 -0
  94. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/files/__init__.py +0 -0
  95. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/files/csvfile.py +0 -0
  96. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/files/filelist.py +0 -0
  97. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/files/fileobject.py +0 -0
  98. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/files/imgfile.py +0 -0
  99. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/files/jsonfile.py +0 -0
  100. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/files/picklefile.py +0 -0
  101. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/files/textfile.py +0 -0
  102. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/files/zipfile.py +0 -0
  103. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/filestore.py +0 -0
  104. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/filesystem.py +0 -0
  105. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/hosts/__init__.py +0 -0
  106. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/hosts/colab_host.py +0 -0
  107. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/hosts/linux_host.py +0 -0
  108. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/log.py +0 -0
  109. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/tee_logger.py +0 -0
  110. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/threads/__init__.py +0 -0
  111. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/threads/semaphore_lock.py +0 -0
  112. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/threads/thread_context.py +0 -0
  113. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/threads/thread_safe_queue.py +0 -0
  114. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/threads/thread_safe_string_collection.py +0 -0
  115. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/system/threads/thread_worker.py +0 -0
  116. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/test/__init__.py +0 -0
  117. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/test/tensor_hash.py +0 -0
  118. {radnn-0.1.5 → radnn-0.1.6}/src/radnn/utils.py +0 -0
  119. {radnn-0.1.5 → radnn-0.1.6}/src/radnn.egg-info/SOURCES.txt +0 -0
  120. {radnn-0.1.5 → radnn-0.1.6}/src/radnn.egg-info/dependency_links.txt +0 -0
  121. {radnn-0.1.5 → radnn-0.1.6}/src/radnn.egg-info/requires.txt +0 -0
  122. {radnn-0.1.5 → radnn-0.1.6}/src/radnn.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: radnn
- Version: 0.1.5
+ Version: 0.1.6
  Summary: Rapid Deep Neural Networks
  Author-email: "Pantelis I. Kaplanoglou" <pikaplanoglou@ihu.gr>
  License-Expression: MIT

pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "radnn"
- version = "0.1.5"
+ version = "0.1.6"
  description = "Rapid Deep Neural Networks"
  readme = "README.md"
  authors = [

src/radnn/__init__.py
@@ -9,7 +9,8 @@
  # Version 0.1.1 [2025-01-08]
  # Version 0.1.4 [2025-01-26]
  # Version 0.1.5 [2025-02-02]
- __version__ = "0.1.5"
+ # Version 0.1.6 [2025-02-03]
+ __version__ = "0.1.6"

  from .system import FileStore, FileSystem
  from .ml_system import MLSystem

src/radnn/data/custom_data_set.py
@@ -23,7 +23,7 @@

  # ......................................................................................

- from sklearn.model_selection import train_test_split # import a standalone procedure function from the pacakge
+ from sklearn.model_selection import train_test_split # import a standalone procedure function from the pacakge
  from sklearn.preprocessing import StandardScaler, MinMaxScaler
  from radnn import mlsys
  from radnn.data.sample_set_simple import SampleSet

src/radnn/data/dataset_base.py
@@ -26,12 +26,15 @@ import os
  import numpy as np
  import pandas as pd
  from abc import ABC, abstractmethod
- from .sample_set import SampleSet
+ from .sample_set_simple import SampleSet
  from .sample_set_kind import SampleSetKind
  from .sample_preprocessor import SamplePreprocessor, VoidPreprocessor
  from .errors import *
  from radnn import FileStore
  from radnn import mlsys
+ from .constants import DataPreprocessingKind
+ from sklearn.model_selection import train_test_split
+ from sklearn.preprocessing import MinMaxScaler, StandardScaler

  # ======================================================================================================================
  class DataSetCallbacks(object):
@@ -73,7 +76,7 @@ class DataSetBase(ABC):
      self.feature_count = None
      self.class_count = None
      self.sample_count = None
-
+
      self.callbacks: DataSetCallbacks = callbacks

      self.hprm: dict | None = None
@@ -81,8 +84,39 @@ class DataSetBase(ABC):
      self.vs: SampleSet | None = None
      self.us: SampleSet | None = None
      self.preprocessor: SamplePreprocessor = VoidPreprocessor(self)
+
+     self.is_split = False
      # ................................................................

+   # --------------------------------------------------------------------------------------
+   def split(self, validation_samples_pc=0.10,
+             preprocessing: DataPreprocessingKind | None = DataPreprocessingKind.STANDARDIZE,
+             random_seed: int=2021):
+
+     nTS_Samples, nVS_Samples, nTS_Labels, nVS_Labels = train_test_split(self.all_samples, self.all_labels,
+                                                                         test_size=validation_samples_pc,
+                                                                         random_state=random_seed)
+     if preprocessing == DataPreprocessingKind.MIN_MAX_NORMALIZE:
+       self.preprocessor = MinMaxScaler().fit(nTS_Samples)
+     elif preprocessing == DataPreprocessingKind.STANDARDIZE:
+       self.preprocessor = StandardScaler().fit(nTS_Samples)
+     else:
+       self.preprocessor = None
+
+     if self.preprocessor is not None:
+       nTS_Samples = self.preprocessor.transform(nTS_Samples)
+       nVS_Samples = self.preprocessor.transform(nVS_Samples)
+
+     # (Re)creating the subsets of the dataset after the splits have been created
+     self.ts = SampleSet(self, nTS_Samples, nTS_Labels, kind=SampleSetKind.TRAINING_SET)
+     if preprocessing == DataPreprocessingKind.STANDARDIZE:
+       self.ts.mean = self.preprocessor.mean_
+       self.ts.std = self.preprocessor.scale_
+
+     self.vs = SampleSet(self, nVS_Samples, nVS_Labels, kind=SampleSetKind.VALIDATION_SET)
+
+     self.is_split = True
+     return self
  # --------------------------------------------------------------------------------------------------------------------
    @property
    def dataset_code(self):
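
The new DataSetBase.split() centralizes the hold-out split and feature scaling that subclasses previously had to do by hand. Below is a minimal, hypothetical usage sketch: the DemoDataSet subclass, the no-argument construction, and the random toy data are assumptions for illustration, while split(), DataPreprocessingKind, is_split, and the ts.mean/ts.std attributes come from the hunk above.

  # Hypothetical subclass exercising the 0.1.6 split() API (not part of the package).
  import numpy as np
  from radnn.data.dataset_base import DataSetBase          # module path as listed in the file table above
  from radnn.data.constants import DataPreprocessingKind   # import added in this release

  class DemoDataSet(DataSetBase):
    def load_data(self):
      # Fill the arrays that split() consumes, then delegate splitting and scaling to the base class.
      self.all_samples = np.random.rand(120, 4)
      self.all_labels = np.random.randint(0, 3, size=120)
      self.split(validation_samples_pc=0.10,
                 preprocessing=DataPreprocessingKind.STANDARDIZE,
                 random_seed=2021)

  ds = DemoDataSet()                # assumes the base constructor needs no required arguments
  ds.load_data()
  print(ds.is_split)                # True once split() has run
  print(ds.ts.mean, ds.ts.std)      # StandardScaler statistics copied onto the training SampleSet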
@@ -115,11 +149,32 @@ class DataSetBase(ABC):
    def load_data(self):
      pass # Must implement
    # --------------------------------------------------------------------------------------------------------------------
-   def load_cache(self):
-     pass # Optionally override
+   def load_cache(self, is_vector_samples=True, is_last_axis_features=True):
+     nSuffix = ""
+     if is_vector_samples:
+       nSuffix = "-vec"
+     elif not is_last_axis_features:
+       nSuffix = "-torch"
+
+     nTSSamples = self.fs.obj.load(f"{self.dataset_code}-TS-Samples{nSuffix}.pkl")
+     nVSSamples = self.fs.obj.load(f"{self.dataset_code}-VS-Samples{nSuffix}.pkl")
+
+     nTSLabels = self.fs.obj.load(f"{self.dataset_code}-TS-Labels{nSuffix}.pkl")
+     nVSLabels = self.fs.obj.load(f"{self.dataset_code}-VS-Labels{nSuffix}.pkl")
+
+     return nTSSamples, nVSSamples, nTSLabels, nVSLabels
    # --------------------------------------------------------------------------------------------------------------------
-   def save_cache(self):
-     pass # Optionally override
+   def save_cache(self, ts_samples, vs_samples, ts_labels, vs_labels, is_vector_samples=True, is_last_axis_features=True):
+     nSuffix = ""
+     if is_vector_samples:
+       nSuffix = "-vec"
+     elif not is_last_axis_features:
+       nSuffix = "-torch"
+     self.fs.obj.save(ts_samples, f"{self.dataset_code}-TS-Samples{nSuffix}.pkl")
+     self.fs.obj.save(vs_samples, f"{self.dataset_code}-VS-Samples{nSuffix}.pkl")
+
+     self.fs.obj.save(ts_labels, f"{self.dataset_code}-TS-Labels{nSuffix}.pkl")
+     self.fs.obj.save(vs_labels, f"{self.dataset_code}-VS-Labels{nSuffix}.pkl")
    # --------------------------------------------------------------------------------------------------------------------
    def prepare(self, hyperparams: dict | None = None):
      self.hprm = hyperparams
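
These cache helpers establish a filename convention in the dataset's FileStore: {dataset_code}-TS/VS-Samples/Labels plus a layout suffix ("-vec" for flattened vector samples, "-torch" when the feature axis comes first, empty otherwise). A hedged sketch of the round trip, assuming a dataset instance ds with a configured self.fs and a dataset_code of "mydata" (both names are illustrative):

  # Write the four pickles under the 0.1.6 naming convention ...
  ds.save_cache(ts_x, vs_x, ts_y, vs_y, is_vector_samples=True)
  # -> mydata-TS-Samples-vec.pkl, mydata-VS-Samples-vec.pkl,
  #    mydata-TS-Labels-vec.pkl,  mydata-VS-Labels-vec.pkl

  # ... and read them back in the same order the method returns them.
  ts_x, vs_x, ts_y, vs_y = ds.load_cache(is_vector_samples=True)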
@@ -134,23 +189,29 @@ class DataSetBase(ABC):
        self.callbacks.lazy_download(self.fs)

      if (self.random_seed is not None):
-       assert self.callbacks is not None, ERR_NO_CALLBACKS
-       assert self.callbacks._random_seed is not None, ERR_DS_NO_RANDOM_SEED_INITIALIZER_CALLBACK
-       self.callbacks.random_seed(self.random_seed)
-
+       bIsInitRandomSeed = False
+       if self.callbacks is not None:
+         if self.callbacks._random_seed is not None:
+           self.callbacks.random_seed(self.random_seed)
+           bIsInitRandomSeed = True
+       if not bIsInitRandomSeed:
+         mlsys.random_seed_all(self.random_seed)
+
      self.ts = None
      self.vs = None
      self.us = None
-     # VIRTUAL CALL: Imports the data from the source local/remote filestore to the local cache.
+
+     # VIRTUAL CALL: Imports the dataset from the source local/remote filestore to the local cache.
      self.load_data()

-     assert self.ts is not None, ERR_DS_SUBSET_MUST_HAVE_TS
-     assert self.ts.kind == SampleSetKind.TRAINING_SET, ERR_DS_SUBSET_INVALID_SETUP
-     if self.vs is not None:
+     if self.is_split:
+       assert self.ts is not None, ERR_DS_SUBSET_MUST_HAVE_TS
        assert self.ts.kind == SampleSetKind.TRAINING_SET, ERR_DS_SUBSET_INVALID_SETUP
+       if self.vs is not None:
+         assert self.vs.kind == SampleSetKind.VALIDATION_SET, ERR_DS_SUBSET_INVALID_SETUP
+
      if self.us is not None:
-       assert self.ts.kind == SampleSetKind.TRAINING_SET, ERR_DS_SUBSET_INVALID_SETUP
-
+       assert self.us.kind == SampleSetKind.UNKNOWN_TEST_SET, ERR_DS_SUBSET_INVALID_SETUP

      return self
    # --------------------------------------------------------------------------------------------------------------------

src/radnn/data/dataset_base_legacy.py
@@ -25,7 +25,7 @@


  import numpy as np
- from sklearn.model_selection import train_test_split # import a standalone procedure function from the pacakge
+ from sklearn.model_selection import train_test_split # import a standalone procedure function from the pacakge


  # =========================================================================================================================

src/radnn/data/errors.py
@@ -28,7 +28,7 @@ ERR_MLSYS_FILESYS_NOT_INITIALIZED = "The filesystem for the Machine Learning hos

  ERR_NO_CALLBACKS = "You should assign callbacks to the dataset perform proper random seed initialization for your framework."
  ERR_DS_NO_RANDOM_SEED_INITIALIZER_CALLBACK = "Callback method for random seed initialization has not been defined."
- ERR_DS_CALLBACKS_NO_LAZY_DOWNLOADER = "Callback method for downloading the data has not been defined."
+ ERR_DS_CALLBACKS_NO_LAZY_DOWNLOADER = "Callback method for downloading the dataset has not been defined."

  ERR_DS_SUBSET_MUST_HAVE_TS = "A dataset must have at least a training subset."
  ERR_DS_SUBSET_INVALID_SETUP = "Invalid sample subset setup. Please use one of the valid kinds: 'training/train/ts', 'validation/val/vs', 'testing/test/us'."

src/radnn/data/sample_preprocessor.py
@@ -47,6 +47,9 @@ class SamplePreprocessor(ABC):
      pass
    # --------------------------------------------------------------------------------------------------------------------

+
+
+
  # ======================================================================================================================
  class VoidPreprocessor(SamplePreprocessor):
    # --------------------------------------------------------------------------------------------------------------------

src/radnn/data/sample_set_simple.py
@@ -11,7 +11,10 @@ class SampleSet(object):
      if self.ids is None:
        self.ids = np.arange(len(self.samples)) + 1
      self.kind: SampleSetKind = kind
-
+
+     self.mean = None
+     self.std = None
+
      self.loader = None
      self._sample_count = None
      self._minibatch_count = None

src/radnn/experiment/ml_experiment.py
@@ -49,7 +49,7 @@ from .ml_experiment_store import MLExperimentStore


  # --------------------------------------------------------------------------------------------------------------------
- # Define a custom sorting function
+ # Define a custom sorting function
  def _sort_by_last_path_element(folder):
    # Split the path into its components
    components = folder.split(os.pathsep)
@@ -226,7 +226,7 @@ class MLExperiment:
      self.model_fs.log_fs.json.save(dTiming, f"timing_info_{self._end_train_time.strftime('%Y-%m-%dT%H%M%S')}.json",
                                     is_sorted_keys=False)

-     # //TODO: Keep cost function names and other learning parameters for evaluation
+     # //TODO: Keep cost function names and other learning parameters for evaluation
    # --------------------------------------------------------------------------------------------------------------------
    def load(self, use_last_checkpoint=False, model_root_folder=None):
      self._currentModelFolder = self.model_fs.base_folder

src/radnn/experiment/ml_experiment_log.py
@@ -1,49 +1,55 @@
  import numpy as np
  from radnn.system.filesystem import FileStore
+
+
  class MLExperimentLog:
    # --------------------------------------------------------------------------------------------------------------------
-   def __init__(self, filename: str, experiment_info: dict | None = None):
+   def __init__(self, filename: str, experiment_info: dict | None = None, is_autoinit: bool = False):
      self.filename = filename
      if experiment_info is None:
        experiment_info = {}
      self.experiment_info = experiment_info
-     self.logs = { "experiment": experiment_info,
-                   "epoch": [],
-                   "epoch_time": [],
-                   "train_step_loss": [],
-                   "train_step_accuracy": [],
-                   "train_loss": [],
-                   "train_accuracy": [],
-                   "val_loss": [],
-                   "val_accuracy": [],
-                   "learning_rate": []
-                 }
+     self.is_autoinit = is_autoinit
+     if self.is_autoinit:
+       self.logs = {"experiment": experiment_info, "epoch": [], "epoch_time": []}
+     else:
+       self.logs = {"experiment": experiment_info, "epoch": [], "epoch_time": [],
+                    "train_step_loss": [],
+                    "train_step_accuracy": [],
+                    "train_loss": [],
+                    "train_accuracy": [],
+                    "val_loss": [],
+                    "val_accuracy": [],
+                    "learning_rate": []}
+
    # --------------------------------------------------------------------------------------------------------------------
-   def assign_series(self, is_autoinit=False, **kwargs):
-     if is_autoinit:
+   def assign_series(self, **kwargs):
+     if self.is_autoinit:
        for key, value in kwargs.items():
          if key not in self.logs:
            self.logs[key] = []
-
+
      for key, value in kwargs.items():
        self.logs[key] = value
+
    # --------------------------------------------------------------------------------------------------------------------
-   def append(self, is_autoinit=False, **kwargs):
-     if is_autoinit:
+   def append(self, **kwargs):
+     if self.is_autoinit:
        for key, value in kwargs.items():
          if key not in self.logs:
            self.logs[key] = []
-
+
      for key, value in kwargs.items():
        self.logs[key].append(value)
      return self
+
    # --------------------------------------------------------------------------------------------------------------------
    def load(self, experiment_fs: FileStore):
      self.logs = experiment_fs.json.load(self.filename)
      return self
+
    # --------------------------------------------------------------------------------------------------------------------
    def save(self, experiment_fs: FileStore):
      experiment_fs.json.save(self.logs, self.filename)
      return self
    # --------------------------------------------------------------------------------------------------------------------
-
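
MLExperimentLog now carries the is_autoinit flag on the instance instead of passing it to every assign_series()/append() call; with it enabled, only the "experiment", "epoch" and "epoch_time" series are pre-created and any other series is added on first use. A minimal usage sketch follows (the filename and metric names are illustrative assumptions; the constructor, append() and save() come from the hunk above):

  from radnn.experiment.ml_experiment_log import MLExperimentLog   # module path as listed in the file table above

  log = MLExperimentLog("training_log.json", experiment_info={"model": "demo"}, is_autoinit=True)
  for epoch in range(3):
    # "epoch" and "epoch_time" exist by default; "val_f1" is created on its first append.
    log.append(epoch=epoch, epoch_time=1.23, val_f1=0.5 + 0.1 * epoch)

  # log.save(experiment_fs) would then persist the dictionary as JSON through a FileStore (see save() above).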

src/radnn/system/hosts/windows_host.py
@@ -72,7 +72,7 @@ class WindowsHost(object):
    def set_windows_sleep_resolution(cls, msecs=1):
      """
      Requests a minimum resolution for periodic timers. This increases accuracy
-     for the waiting interval of the time.sleep function
+     for the waiting interval of the time.sleep function
      """
      oWinMM = ctypes.WinDLL('oWinMM')
      oWinMM.timeBeginPeriod(msecs)

src/radnn.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: radnn
- Version: 0.1.5
+ Version: 0.1.6
  Summary: Rapid Deep Neural Networks
  Author-email: "Pantelis I. Kaplanoglou" <pikaplanoglou@ihu.gr>
  License-Expression: MIT