nnodely-0.14.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. mplplots/__init__.py +0 -0
  2. mplplots/plots.py +131 -0
  3. nnodely/__init__.py +42 -0
  4. nnodely/activation.py +85 -0
  5. nnodely/arithmetic.py +203 -0
  6. nnodely/earlystopping.py +81 -0
  7. nnodely/exporter/__init__.py +3 -0
  8. nnodely/exporter/export.py +275 -0
  9. nnodely/exporter/exporter.py +45 -0
  10. nnodely/exporter/reporter.py +48 -0
  11. nnodely/exporter/standardexporter.py +108 -0
  12. nnodely/fir.py +150 -0
  13. nnodely/fuzzify.py +221 -0
  14. nnodely/initializer.py +31 -0
  15. nnodely/input.py +131 -0
  16. nnodely/linear.py +130 -0
  17. nnodely/localmodel.py +82 -0
  18. nnodely/logger.py +94 -0
  19. nnodely/loss.py +30 -0
  20. nnodely/model.py +263 -0
  21. nnodely/modeldef.py +205 -0
  22. nnodely/nnodely.py +1295 -0
  23. nnodely/optimizer.py +91 -0
  24. nnodely/output.py +23 -0
  25. nnodely/parameter.py +103 -0
  26. nnodely/parametricfunction.py +329 -0
  27. nnodely/part.py +201 -0
  28. nnodely/relation.py +149 -0
  29. nnodely/trigonometric.py +67 -0
  30. nnodely/utils.py +101 -0
  31. nnodely/visualizer/__init__.py +4 -0
  32. nnodely/visualizer/dynamicmpl/functionplot.py +34 -0
  33. nnodely/visualizer/dynamicmpl/fuzzyplot.py +31 -0
  34. nnodely/visualizer/dynamicmpl/resultsplot.py +28 -0
  35. nnodely/visualizer/dynamicmpl/trainingplot.py +46 -0
  36. nnodely/visualizer/mplnotebookvisualizer.py +66 -0
  37. nnodely/visualizer/mplvisualizer.py +215 -0
  38. nnodely/visualizer/textvisualizer.py +320 -0
  39. nnodely/visualizer/visualizer.py +84 -0
  40. nnodely-0.14.0.dist-info/LICENSE +21 -0
  41. nnodely-0.14.0.dist-info/METADATA +401 -0
  42. nnodely-0.14.0.dist-info/RECORD +44 -0
  43. nnodely-0.14.0.dist-info/WHEEL +5 -0
  44. nnodely-0.14.0.dist-info/top_level.txt +2 -0
nnodely/optimizer.py ADDED
@@ -0,0 +1,91 @@
+ import copy
+ import torch
+
+ from nnodely.utils import check
+
+ class Optimizer:
+     def __init__(self, name, optimizer_defaults = {}, optimizer_params = []):
+         self.name = name
+         self.optimizer_defaults = copy.deepcopy(optimizer_defaults)
+         self.optimizer_params = self.unfold(copy.deepcopy(optimizer_params))
+         self.all_params = None
+         self.params_to_train = None
+
+     def set_params_to_train(self, all_params, params_to_train):
+         self.all_params = all_params
+         self.params_to_train = params_to_train
+         if self.optimizer_params == []:
+             # Default param groups: trainable parameters keep the default lr,
+             # all the others are frozen with lr = 0.0.
+             for param_name in self.all_params.keys():
+                 if param_name in self.params_to_train:
+                     self.optimizer_params.append({'params': param_name})
+                 else:
+                     self.optimizer_params.append({'params': param_name, 'lr': 0.0})
+
+     def set_defaults(self, optimizer_defaults):
+         self.optimizer_defaults = optimizer_defaults
+
+     def set_params(self, optimizer_params):
+         self.optimizer_params = self.unfold(optimizer_params)
+
+     def unfold(self, params):
+         # Expand groups whose 'params' entry is a list into one group per parameter.
+         optimizer_params = []
+         check(type(params) is list, KeyError, f'The params {params} must be a list')
+         for param in params:
+             if type(param['params']) is list:
+                 par_copy = copy.deepcopy(param)
+                 del par_copy['params']
+                 for par in param['params']:
+                     optimizer_params.append({'params': par} | par_copy)
+             else:
+                 optimizer_params.append(param)
+         return optimizer_params
+
+     def add_defaults(self, option_name, params, overwrite = True):
+         if params is not None:
+             if overwrite:
+                 self.optimizer_defaults[option_name] = params
+             elif option_name not in self.optimizer_defaults:
+                 self.optimizer_defaults[option_name] = params
+
+     def add_option_to_params(self, option_name, params, overwrite = True):
+         if params is None:
+             return
+         check(self.all_params is not None, RuntimeError, "Call set_params_to_train before add_option_to_params")
+         for key, value in params.items():
+             old_key = False
+             for param in self.optimizer_params:
+                 if param['params'] == key:
+                     old_key = True
+                     if overwrite:
+                         param[option_name] = value
+                     elif option_name not in param:
+                         param[option_name] = value
+             if old_key == False:
+                 self.optimizer_params.append({'params': key, option_name: value})
+
+     def replace_key_with_params(self):
+         # Return a copy of the param groups where each parameter name is replaced
+         # by the actual tensor, as expected by torch.optim.
+         params = copy.deepcopy(self.optimizer_params)
+         for param in params:
+             if type(param['params']) is list:
+                 for ind, par in enumerate(param['params']):
+                     param['params'][ind] = self.all_params[par]
+             else:
+                 param['params'] = self.all_params[param['params']]
+         return params
+
+     def get_torch_optimizer(self):
+         raise NotImplementedError('The function get_torch_optimizer must be implemented.')
+
+ class SGD(Optimizer):
+     def __init__(self, optimizer_defaults = {}, optimizer_params = []):
+         super(SGD, self).__init__('SGD', optimizer_defaults, optimizer_params)
+
+     def get_torch_optimizer(self):
+         return torch.optim.SGD(self.replace_key_with_params(), **self.optimizer_defaults)
+
+ class Adam(Optimizer):
+     def __init__(self, optimizer_defaults = {}, optimizer_params = []):
+         super(Adam, self).__init__('Adam', optimizer_defaults, optimizer_params)
+
+     def get_torch_optimizer(self):
+         return torch.optim.Adam(self.replace_key_with_params(), **self.optimizer_defaults)
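A minimal sketch of how these wrappers are driven (the parameter names and tensors are illustrative, not from the package): a name-to-tensor map goes in, and a ready torch optimizer with one param group per parameter comes out.

import torch
from nnodely.optimizer import Adam

# Hypothetical named parameters of a model (name -> tensor).
all_params = {'gain': torch.ones(3, 1, requires_grad=True),
              'offset': torch.zeros(3, 1, requires_grad=True)}

opt = Adam(optimizer_defaults={'lr': 1e-3})
# 'gain' trains at the default lr; 'offset' is frozen in a group with lr = 0.0.
opt.set_params_to_train(all_params, params_to_train=['gain'])
torch_opt = opt.get_torch_optimizer()  # a torch.optim.Adam with two param groups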
nnodely/output.py ADDED
@@ -0,0 +1,23 @@
+ from pprint import pformat
+
+ from nnodely.relation import Stream
+ from nnodely.utils import check
+
+ from nnodely.logger import logging, nnLogger
+ log = nnLogger(__name__, logging.CRITICAL)
+
+ class Output(Stream):
+     def __init__(self, name, relation):
+         super().__init__(name, relation.json, relation.dim)
+         log.debug(f"Output {name}")
+         self.json['Outputs'][name] = relation.name
+         log.debug("\n" + pformat(self.json))
+
+     def closedLoop(self, obj):
+         check(False, TypeError,
+               f"The {self} must be a Stream and not a {type(self)}.")
+
+     def connect(self, obj):
+         check(False, TypeError,
+               f"The {self} must be a Stream and not a {type(self)}.")
nnodely/parameter.py ADDED
@@ -0,0 +1,103 @@
+ import copy, inspect, textwrap
+ import numpy as np
+
+ from collections.abc import Callable
+
+ from nnodely.relation import NeuObj, Stream
+ from nnodely.utils import check, enforce_types
+
+ def is_numpy_float(var):
+     return isinstance(var, (np.float16, np.float32, np.float64))
+
+ class Constant(NeuObj, Stream):
+     @enforce_types
+     def __init__(self, name:str,
+                  values:list|float|int|np.ndarray,
+                  tw:float|int|None = None,
+                  sw:int|None = None):
+
+         NeuObj.__init__(self, name)
+         values = np.array(values)
+         shape = values.shape
+         values = values.tolist()
+         if len(shape) == 0:
+             self.dim = {'dim': 1}
+         else:
+             check(len(shape) >= 2, ValueError,
+                   "The shape of a Constant must have at least 2 dimensions (or be a scalar).")
+             dimensions = shape[1] if len(shape[1:]) == 1 else list(shape[1:])
+             self.dim = {'dim': dimensions}
+             if tw is not None:
+                 check(sw is None, ValueError, "If tw is set, sw must be None")
+                 self.dim['tw'] = tw
+             elif sw is not None:
+                 self.dim['sw'] = sw
+                 check(shape[0] == self.dim['sw'], ValueError, f"The sw = {sw} is different from the sw = {shape[0]} of the values.")
+             else:
+                 self.dim['sw'] = shape[0]
+
+         # deepcopy dimension information inside Constants
+         self.json['Constants'][self.name] = copy.deepcopy(self.dim)
+         self.json['Constants'][self.name]['values'] = values
+         Stream.__init__(self, name, self.json, self.dim)
+
+ class Parameter(NeuObj, Stream):
+     @enforce_types
+     def __init__(self, name:str,
+                  dimensions:int|list|tuple|None = None,
+                  tw:float|int|None = None,
+                  sw:int|None = None,
+                  values:list|float|int|np.ndarray|None = None,
+                  init:Callable|None = None,
+                  init_params:dict|None = None):
+
+         NeuObj.__init__(self, name)
+         dimensions = list(dimensions) if type(dimensions) is tuple else dimensions
+         if values is None:
+             if dimensions is None:
+                 dimensions = 1
+             self.dim = {'dim': dimensions}
+             if tw is not None:
+                 check(sw is None, ValueError, "If tw is set, sw must be None")
+                 self.dim['tw'] = tw
+             elif sw is not None:
+                 self.dim['sw'] = sw
+
+             # deepcopy dimension information inside Parameters
+             self.json['Parameters'][self.name] = copy.deepcopy(self.dim)
+         else:
+             values = np.array(values)
+             shape = values.shape
+             values = values.tolist()
+             check(len(shape) >= 2, ValueError,
+                   "The shape of a parameter must have at least 2 dimensions.")
+             values_dimensions = shape[1] if len(shape[1:]) == 1 else list(shape[1:])
+             if dimensions is None:
+                 dimensions = values_dimensions
+             else:
+                 check(dimensions == values_dimensions, ValueError,
+                       f"The dimensions = {dimensions} are different from the dimensions = {values_dimensions} of the values.")
+             self.dim = {'dim': dimensions}
+
+             if tw is not None:
+                 check(sw is None, ValueError, "If tw is set, sw must be None")
+                 self.dim['tw'] = tw
+             elif sw is not None:
+                 self.dim['sw'] = sw
+                 check(shape[0] == self.dim['sw'], ValueError, f"The sw = {sw} is different from the sw = {shape[0]} of the values.")
+             else:
+                 self.dim['sw'] = shape[0]
+
+             # deepcopy dimension information inside Parameters
+             self.json['Parameters'][self.name] = copy.deepcopy(self.dim)
+             self.json['Parameters'][self.name]['values'] = values
+
+         if init is not None:
+             check('values' not in self.json['Parameters'][self.name], ValueError, f"The parameter {self.name} is already initialized.")
+             check(inspect.isfunction(init), ValueError, "The init parameter must be a function.")
+             code = textwrap.dedent(inspect.getsource(init)).replace('\"', '\'')
+             self.json['Parameters'][self.name]['init_fun'] = { 'code' : code, 'name' : init.__name__}
+             if init_params is not None:
+                 self.json['Parameters'][self.name]['init_fun']['params'] = init_params
+
+         Stream.__init__(self, name, self.json, self.dim)
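A short sketch exercising the two classes above (names and values are illustrative, not from the package). For multi-sample values, the first axis is the sample window and the remaining axes give the feature dimensions.

from nnodely.parameter import Constant, Parameter

g = Constant('g', values=9.81)                    # scalar: dim = 1
c = Constant('c', values=[[1.0, 2.0, 3.0],
                          [4.0, 5.0, 6.0]])       # sw = 2, dim = 3
k = Parameter('k', dimensions=3, sw=1)            # declared by shape only
k2 = Parameter('k2', values=[[0.1, 0.2, 0.3]])    # infers dim = 3, sw = 1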
nnodely/parametricfunction.py ADDED
@@ -0,0 +1,329 @@
+ import inspect, copy, textwrap, torch, math
+
+ import torch.nn as nn
+ import numpy as np
+
+ from collections.abc import Callable
+
+ from nnodely.relation import NeuObj, Stream, toStream
+ from nnodely.model import Model
+ from nnodely.parameter import Parameter, Constant
+ from nnodely.utils import check, merge, enforce_types
+
+
+ paramfun_relation_name = 'ParamFun'
+
+ class ParamFun(NeuObj):
+     @enforce_types
+     def __init__(self, param_fun:Callable,
+                  constants:list|dict|None = None,
+                  parameters_dimensions:list|dict|None = None,
+                  parameters:list|dict|None = None,
+                  map_over_batch:bool = False):
+
+         self.relation_name = paramfun_relation_name
+
+         # input parameters
+         self.param_fun = param_fun
+         self.constants = constants
+         self.parameters_dimensions = parameters_dimensions
+         self.parameters = parameters
+         self.map_over_batch = map_over_batch
+
+         self.output_dimension = {}
+         super().__init__('F' + paramfun_relation_name + str(NeuObj.count))
+         code = textwrap.dedent(inspect.getsource(param_fun)).replace('\"', '\'')
+         self.json['Functions'][self.name] = {
+             'code' : code,
+             'name' : param_fun.__name__,
+         }
+         self.json['Functions'][self.name]['params_and_consts'] = []
+
+     def __call__(self, *obj) -> Stream:
+         stream_name = paramfun_relation_name + str(Stream.count)
+
+         funinfo = inspect.getfullargspec(self.param_fun)
+         n_function_input = len(funinfo.args)
+         n_call_input = len(obj)
+         n_new_constants_and_params = n_function_input - n_call_input
+
+         if 'n_input' not in self.json['Functions'][self.name]:
+             self.json['Functions'][self.name]['n_input'] = n_call_input
+             self.__set_params_and_consts(n_new_constants_and_params)
+
+         input_dimensions = []
+         input_types = []
+         for ind, o in enumerate(obj):
+             if type(o) in (int, float, list):
+                 obj_type = Constant
+             else:
+                 obj_type = type(o)
+             o = toStream(o)
+             check(type(o) is Stream, TypeError,
+                   f"The type of {o} is {type(o)} and is not supported for ParamFun operation.")
+             input_types.append(obj_type)
+             input_dimensions.append(o.dim)
+
+         self.json['Functions'][self.name]['in_dim'] = copy.deepcopy(input_dimensions)
+         self.__infer_output_dimensions(input_types, input_dimensions)
+         self.json['Functions'][self.name]['out_dim'] = copy.deepcopy(self.output_dimension)
+
+         # Verify that the call provides the same number of inputs seen at definition time
+         missing_params = n_new_constants_and_params - len(self.json['Functions'][self.name]['params_and_consts'])
+         check(missing_params == 0, ValueError, "The function is called with a different number of inputs.")
+
+         stream_json = copy.deepcopy(self.json)
+         input_names = []
+         for ind, o in enumerate(obj):
+             o = toStream(o)
+             check(type(o) is Stream, TypeError,
+                   f"The type of {o} is {type(o)} and is not supported for ParamFun operation.")
+             stream_json = merge(stream_json, o.json)
+             input_names.append(o.name)
+
+         output_dimension = copy.deepcopy(self.output_dimension)
+         stream_json['Relations'][stream_name] = [paramfun_relation_name, input_names, self.name]
+
+         return Stream(stream_name, stream_json, output_dimension)
+
+     def __set_params_and_consts(self, n_new_constants_and_params):
+         funinfo = inspect.getfullargspec(self.param_fun)
+
+         # Create the missing constants from a list
+         if type(self.constants) is list:
+             for const in self.constants:
+                 if type(const) is Constant:
+                     self.json['Functions'][self.name]['params_and_consts'].append(const.name)
+                     self.json['Constants'][const.name] = copy.deepcopy(const.json['Constants'][const.name])
+                 elif type(const) is str:
+                     self.json['Functions'][self.name]['params_and_consts'].append(const)
+                     self.json['Constants'][const] = {'dim': 1}
+                 else:
+                     check(type(const) is Constant or type(const) is str, TypeError,
+                           'The element inside the "constants" list must be a Constant or str')
+
+         # Create the missing parameters from a list
+         if type(self.parameters) is list:
+             check(self.parameters_dimensions is None, ValueError,
+                   '"parameters_dimensions" must be None if "parameters" is set using a list')
+             for param in self.parameters:
+                 if type(param) is Parameter:
+                     self.json['Functions'][self.name]['params_and_consts'].append(param.name)
+                     self.json['Parameters'][param.name] = copy.deepcopy(param.json['Parameters'][param.name])
+                 elif type(param) is str:
+                     self.json['Functions'][self.name]['params_and_consts'].append(param)
+                     self.json['Parameters'][param] = {'dim': 1}
+                 else:
+                     check(type(param) is Parameter or type(param) is str, TypeError,
+                           'The element inside the "parameters" list must be a Parameter or str')
+         elif type(self.parameters_dimensions) is list:
+             for i, param_dim in enumerate(self.parameters_dimensions):
+                 idx = i + len(funinfo.args) - len(self.parameters_dimensions)
+                 param_name = self.name + str(idx)
+                 self.json['Functions'][self.name]['params_and_consts'].append(param_name)
+                 self.json['Parameters'][param_name] = {'dim': list(self.parameters_dimensions[i])}
+
+         # Create the missing parameters and constants from a dict
+         missing_params = n_new_constants_and_params - len(self.json['Functions'][self.name]['params_and_consts'])
+         if missing_params or type(self.constants) is dict or type(self.parameters) is dict or type(self.parameters_dimensions) is dict:
+             n_input = len(funinfo.args) - missing_params
+             n_elem_dict = (len(self.constants if type(self.constants) is dict else [])
+                            + len(self.parameters if type(self.parameters) is dict else [])
+                            + len(self.parameters_dimensions if type(self.parameters_dimensions) is dict else []))
+             for i, key in enumerate(funinfo.args):
+                 if i >= n_input:
+                     if type(self.parameters) is dict and key in self.parameters:
+                         if self.parameters_dimensions:
+                             check(key not in self.parameters_dimensions, TypeError,
+                                   f'The parameter {key} must be removed from "parameters_dimensions".')
+                         param = self.parameters[key]
+                         if type(param) is Parameter:
+                             self.json['Functions'][self.name]['params_and_consts'].append(param.name)
+                             self.json['Parameters'][param.name] = copy.deepcopy(param.json['Parameters'][param.name])
+                         elif type(param) is str:
+                             self.json['Functions'][self.name]['params_and_consts'].append(param)
+                             self.json['Parameters'][param] = {'dim': 1}
+                         else:
+                             check(type(param) is Parameter or type(param) is str, TypeError,
+                                   'The element inside the "parameters" dict must be a Parameter or str')
+                         n_elem_dict -= 1
+                     elif type(self.parameters_dimensions) is dict and key in self.parameters_dimensions:
+                         param_name = self.name + key
+                         dim = self.parameters_dimensions[key]
+                         check(isinstance(dim, (list, tuple, int)), TypeError,
+                               'The element inside the "parameters_dimensions" dict must be a list, tuple, or int')
+                         self.json['Functions'][self.name]['params_and_consts'].append(param_name)
+                         self.json['Parameters'][param_name] = {'dim': list(dim) if type(dim) is tuple else dim}
+                         n_elem_dict -= 1
+                     elif type(self.constants) is dict and key in self.constants:
+                         const = self.constants[key]
+                         if type(const) is Constant:
+                             self.json['Functions'][self.name]['params_and_consts'].append(const.name)
+                             self.json['Constants'][const.name] = copy.deepcopy(const.json['Constants'][const.name])
+                         elif type(const) is str:
+                             self.json['Functions'][self.name]['params_and_consts'].append(const)
+                             self.json['Constants'][const] = {'dim': 1}
+                         else:
+                             check(type(const) is Constant or type(const) is str, TypeError,
+                                   'The element inside the "constants" dict must be a Constant or str')
+                         n_elem_dict -= 1
+                     else:
+                         # Argument not listed anywhere: create a scalar parameter for it
+                         param_name = self.name + key
+                         self.json['Functions'][self.name]['params_and_consts'].append(param_name)
+                         self.json['Parameters'][param_name] = {'dim': 1}
+             check(n_elem_dict == 0, ValueError, 'Some of the input parameters are not used in the function.')
+
+     def __infer_output_dimensions(self, input_types, input_dimensions):
+         batch_dim = 5
+
+         all_inputs_dim = input_dimensions
+         all_inputs_type = input_types
+         params_and_consts = self.json['Constants'] | self.json['Parameters']
+         for name in self.json['Functions'][self.name]['params_and_consts']:
+             all_inputs_dim.append(params_and_consts[name])
+             all_inputs_type.append(Constant)
+
+         # Find a sample rate at which every time window maps to an integer number of samples
+         n_samples_sec = 0.1
+         is_int = False
+         while is_int == False:
+             n_samples_sec *= 10
+             vect_input_time = [math.isclose(d['tw'] * n_samples_sec, round(d['tw'] * n_samples_sec)) for d in all_inputs_dim if 'tw' in d]
+             if len(vect_input_time) == 0:
+                 is_int = True
+             else:
+                 is_int = sum(vect_input_time) == len(vect_input_time)
+
+         # Build dummy inputs with the right dimensions
+         inputs = []
+         inputs_win_type = []
+         inputs_win = []
+         input_map_dim = ()
+
+         for t, dim in zip(all_inputs_type, all_inputs_dim):
+             window = 'tw' if 'tw' in dim else ('sw' if 'sw' in dim else None)
+             if window == 'tw':
+                 dim_win = round(dim[window] * n_samples_sec)
+             elif window == 'sw':
+                 dim_win = dim[window]
+             else:
+                 dim_win = 1
+             if t in (Parameter, Constant):
+                 if self.map_over_batch:
+                     input_map_dim += (None,)
+                 if type(dim['dim']) is list:
+                     inputs.append(torch.rand(size=(dim_win,) + tuple(dim['dim'])))
+                 else:
+                     inputs.append(torch.rand(size=(dim_win, dim['dim'])))
+             else:
+                 inputs.append(torch.rand(size=(batch_dim, dim_win, dim['dim'])))
+                 if self.map_over_batch:
+                     input_map_dim += (0,)
+
+             inputs_win_type.append(window)
+             inputs_win.append(dim_win)
+
+         if self.map_over_batch:
+             self.json['Functions'][self.name]['map_over_dim'] = list(input_map_dim)
+             function_to_call = torch.func.vmap(self.param_fun, in_dims=input_map_dim)
+         else:
+             self.json['Functions'][self.name]['map_over_dim'] = False
+             function_to_call = self.param_fun
+         out = function_to_call(*inputs)
+         out_shape = out.shape
+         check(out_shape[0] == batch_dim, ValueError, "The batch output dimension is not correct.")
+         out_dim = list(out_shape[2:])
+         check(len(out_dim) == 1, ValueError, "The output of the function must be at most a vector.")
+         # If the output window matches the window of a stream input, inherit that window
+         out_win_from_input = False
+         for idx, win in enumerate(inputs_win):
+             if out_shape[1] == win and all_inputs_type[idx] not in (Parameter, Constant):
+                 out_win_from_input = True
+                 out_win_type = inputs_win_type[idx]
+                 out_win = all_inputs_dim[idx][out_win_type]
+         if out_win_from_input == False:
+             out_win_type = 'sw'
+             out_win = out_shape[1]
+         self.output_dimension = {'dim': out_dim[0], out_win_type: out_win}
+
+ def return_standard_inputs(json, model_def, xlim = None, num_points = 1000):
+     check(json['n_input'] == 1 or json['n_input'] == 2, ValueError, "The function must have only one or two inputs.")
+     fun_inputs = tuple()
+     for i in range(json['n_input']):
+         dim = json['in_dim'][i]
+         check(dim['dim'] == 1, ValueError, "The input dimension must be 1.")
+         if 'tw' in dim:
+             check(dim['tw'] == model_def['Info']['SampleTime'], ValueError, "The input window must be one sample.")
+         elif 'sw' in dim:
+             check(dim['sw'] == 1, ValueError, "The input window must be one sample.")
+         if xlim is not None:
+             if json['n_input'] == 2:
+                 check(np.array(xlim).shape == (json['n_input'], 2), ValueError,
+                       "The xlim must have the same shape as the number of inputs.")
+                 x_value = np.linspace(xlim[i][0], xlim[i][1], num=num_points)
+             else:
+                 check(np.array(xlim).shape == (2,), ValueError,
+                       "The xlim must have the same shape as the number of inputs.")
+                 x_value = np.linspace(xlim[0], xlim[1], num=num_points)
+         else:
+             x_value = np.linspace(0, 1, num=num_points)
+         if i == 0:
+             x0_value = torch.from_numpy(x_value)
+         else:
+             x1_value = torch.from_numpy(x_value)
+
+     if json['n_input'] == 2:
+         x0_value, x1_value = torch.meshgrid(x0_value, x1_value, indexing="xy")
+         x0_value = x0_value.flatten().unsqueeze(1).unsqueeze(1)
+         x1_value = x1_value.flatten().unsqueeze(1).unsqueeze(1)
+         fun_inputs += (x0_value, x1_value,)
+     else:
+         x0_value = x0_value.unsqueeze(1).unsqueeze(1)
+         fun_inputs += (x0_value,)
+
+     for key in json['params_and_consts']:
+         val = model_def['Parameters'][key] if key in model_def['Parameters'] else model_def['Constants'][key]
+         fun_inputs += tuple([torch.from_numpy(np.array(val['values']))])  # The values are appended as a tuple element
+
+     return fun_inputs
+
+ def return_function(json, fun_inputs):
+     exec(json['code'], globals())
+     function_to_call = globals()[json['name']]
+     output = function_to_call(*fun_inputs)
+     check(output.shape[1] == 1, ValueError, "The output dimension must be 1.")
+     check(output.shape[2] == 1, ValueError, "The output window must be 1.")
+     funinfo = inspect.getfullargspec(function_to_call)
+     return output, funinfo.args
+
+
+ class Parametric_Layer(nn.Module):
+     def __init__(self, func, params_and_consts, map_over_batch):
+         super().__init__()
+         self.name = func['name']
+         self.params_and_consts = params_and_consts
+         if type(map_over_batch) is list:
+             self.map_over_batch = True
+             self.input_map_dim = tuple(map_over_batch)
+         else:
+             self.map_over_batch = False
+         # Add the function to the globals
+         try:
+             code = 'import torch\n@torch.fx.wrap\n' + func['code']
+             exec(code, globals())
+         except Exception as e:
+             print(f"An error occurred: {e}")
+
+     def forward(self, *inputs):
+         args = list(inputs) + self.params_and_consts
+         # Retrieve the function object from the globals dictionary
+         function_to_call = globals()[self.name]
+         # Call the function using the retrieved function object
+         if self.map_over_batch:
+             function_to_call = torch.func.vmap(function_to_call, in_dims=self.input_map_dim)
+         result = function_to_call(*args)
+         return result
+
+ def createParamFun(self, *func_params):
+     return Parametric_Layer(func=func_params[0], params_and_consts=func_params[1], map_over_batch=func_params[2])
+
+ setattr(Model, paramfun_relation_name, createParamFun)
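To close, a hedged sketch of ParamFun in use; it assumes the Input relation exported at the package top level and its last() window accessor. Function arguments that are not supplied at call time (K and T below) are created as trainable scalar parameters attached to the function.

from nnodely import Input, ParamFun

def parametric_fun(x, K, T):
    return K * x + T

x = Input('x')
out_stream = ParamFun(parametric_fun)(x.last())   # a Stream; K and T become parameters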