@spiky-panda/core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (130) hide show
  1. package/dist/geometry/geometry.cartesian.d.ts +56 -0
  2. package/dist/geometry/geometry.cartesian.js +229 -0
  3. package/dist/geometry/geometry.cartesian.js.map +1 -0
  4. package/dist/geometry/geometry.interfaces.d.ts +37 -0
  5. package/dist/geometry/geometry.interfaces.js +31 -0
  6. package/dist/geometry/geometry.interfaces.js.map +1 -0
  7. package/dist/geometry/index.d.ts +2 -0
  8. package/dist/geometry/index.js +3 -0
  9. package/dist/geometry/index.js.map +1 -0
  10. package/dist/graph/graph.builder.graph.d.ts +44 -0
  11. package/dist/graph/graph.builder.graph.js +78 -0
  12. package/dist/graph/graph.builder.graph.js.map +1 -0
  13. package/dist/graph/graph.builder.node.d.ts +20 -0
  14. package/dist/graph/graph.builder.node.js +45 -0
  15. package/dist/graph/graph.builder.node.js.map +1 -0
  16. package/dist/graph/graph.builder.olink.d.ts +12 -0
  17. package/dist/graph/graph.builder.olink.js +32 -0
  18. package/dist/graph/graph.builder.olink.js.map +1 -0
  19. package/dist/graph/graph.graph.d.ts +13 -0
  20. package/dist/graph/graph.graph.js +26 -0
  21. package/dist/graph/graph.graph.js.map +1 -0
  22. package/dist/graph/graph.graphItem.d.ts +14 -0
  23. package/dist/graph/graph.graphItem.js +36 -0
  24. package/dist/graph/graph.graphItem.js.map +1 -0
  25. package/dist/graph/graph.interfaces.builder.d.ts +28 -0
  26. package/dist/graph/graph.interfaces.builder.js +2 -0
  27. package/dist/graph/graph.interfaces.builder.js.map +1 -0
  28. package/dist/graph/graph.interfaces.d.ts +59 -0
  29. package/dist/graph/graph.interfaces.js +51 -0
  30. package/dist/graph/graph.interfaces.js.map +1 -0
  31. package/dist/graph/graph.node.d.ts +12 -0
  32. package/dist/graph/graph.node.js +22 -0
  33. package/dist/graph/graph.node.js.map +1 -0
  34. package/dist/graph/graph.olink.d.ts +13 -0
  35. package/dist/graph/graph.olink.js +56 -0
  36. package/dist/graph/graph.olink.js.map +1 -0
  37. package/dist/graph/index.d.ts +9 -0
  38. package/dist/graph/index.js +10 -0
  39. package/dist/graph/index.js.map +1 -0
  40. package/dist/index.d.ts +6 -0
  41. package/dist/index.js +15 -0
  42. package/dist/index.js.map +1 -0
  43. package/dist/neuralnetwork/ann/index.d.ts +1 -0
  44. package/dist/neuralnetwork/ann/index.js +2 -0
  45. package/dist/neuralnetwork/ann/index.js.map +1 -0
  46. package/dist/neuralnetwork/ann/mlp/index.d.ts +9 -0
  47. package/dist/neuralnetwork/ann/mlp/index.js +10 -0
  48. package/dist/neuralnetwork/ann/mlp/index.js.map +1 -0
  49. package/dist/neuralnetwork/ann/mlp/mlp.activation.d.ts +22 -0
  50. package/dist/neuralnetwork/ann/mlp/mlp.activation.js +22 -0
  51. package/dist/neuralnetwork/ann/mlp/mlp.activation.js.map +1 -0
  52. package/dist/neuralnetwork/ann/mlp/mlp.builder.d.ts +43 -0
  53. package/dist/neuralnetwork/ann/mlp/mlp.builder.js +168 -0
  54. package/dist/neuralnetwork/ann/mlp/mlp.builder.js.map +1 -0
  55. package/dist/neuralnetwork/ann/mlp/mlp.graph.d.ts +7 -0
  56. package/dist/neuralnetwork/ann/mlp/mlp.graph.js +7 -0
  57. package/dist/neuralnetwork/ann/mlp/mlp.graph.js.map +1 -0
  58. package/dist/neuralnetwork/ann/mlp/mlp.inference.d.ts +13 -0
  59. package/dist/neuralnetwork/ann/mlp/mlp.inference.js +63 -0
  60. package/dist/neuralnetwork/ann/mlp/mlp.inference.js.map +1 -0
  61. package/dist/neuralnetwork/ann/mlp/mlp.interfaces.d.ts +21 -0
  62. package/dist/neuralnetwork/ann/mlp/mlp.interfaces.js +7 -0
  63. package/dist/neuralnetwork/ann/mlp/mlp.interfaces.js.map +1 -0
  64. package/dist/neuralnetwork/ann/mlp/mlp.neuron.d.ts +10 -0
  65. package/dist/neuralnetwork/ann/mlp/mlp.neuron.js +20 -0
  66. package/dist/neuralnetwork/ann/mlp/mlp.neuron.js.map +1 -0
  67. package/dist/neuralnetwork/ann/mlp/mlp.runtime.utils.d.ts +5 -0
  68. package/dist/neuralnetwork/ann/mlp/mlp.runtime.utils.js +48 -0
  69. package/dist/neuralnetwork/ann/mlp/mlp.runtime.utils.js.map +1 -0
  70. package/dist/neuralnetwork/ann/mlp/mlp.synapse.d.ts +6 -0
  71. package/dist/neuralnetwork/ann/mlp/mlp.synapse.js +7 -0
  72. package/dist/neuralnetwork/ann/mlp/mlp.synapse.js.map +1 -0
  73. package/dist/neuralnetwork/ann/mlp/training/index.d.ts +4 -0
  74. package/dist/neuralnetwork/ann/mlp/training/index.js +5 -0
  75. package/dist/neuralnetwork/ann/mlp/training/index.js.map +1 -0
  76. package/dist/neuralnetwork/ann/mlp/training/mlp.loss.d.ts +10 -0
  77. package/dist/neuralnetwork/ann/mlp/training/mlp.loss.js +11 -0
  78. package/dist/neuralnetwork/ann/mlp/training/mlp.loss.js.map +1 -0
  79. package/dist/neuralnetwork/ann/mlp/training/mlp.optimizers.d.ts +7 -0
  80. package/dist/neuralnetwork/ann/mlp/training/mlp.optimizers.js +59 -0
  81. package/dist/neuralnetwork/ann/mlp/training/mlp.optimizers.js.map +1 -0
  82. package/dist/neuralnetwork/ann/mlp/training/mlp.training.d.ts +22 -0
  83. package/dist/neuralnetwork/ann/mlp/training/mlp.training.interfaces.d.ts +27 -0
  84. package/dist/neuralnetwork/ann/mlp/training/mlp.training.interfaces.js +2 -0
  85. package/dist/neuralnetwork/ann/mlp/training/mlp.training.interfaces.js.map +1 -0
  86. package/dist/neuralnetwork/ann/mlp/training/mlp.training.js +107 -0
  87. package/dist/neuralnetwork/ann/mlp/training/mlp.training.js.map +1 -0
  88. package/dist/neuralnetwork/index.d.ts +8 -0
  89. package/dist/neuralnetwork/index.js +9 -0
  90. package/dist/neuralnetwork/index.js.map +1 -0
  91. package/dist/neuralnetwork/nn.builders.d.ts +21 -0
  92. package/dist/neuralnetwork/nn.builders.js +71 -0
  93. package/dist/neuralnetwork/nn.builders.js.map +1 -0
  94. package/dist/neuralnetwork/nn.interfaces.builder.d.ts +16 -0
  95. package/dist/neuralnetwork/nn.interfaces.builder.js +2 -0
  96. package/dist/neuralnetwork/nn.interfaces.builder.js.map +1 -0
  97. package/dist/neuralnetwork/nn.interfaces.d.ts +16 -0
  98. package/dist/neuralnetwork/nn.interfaces.js +18 -0
  99. package/dist/neuralnetwork/nn.interfaces.js.map +1 -0
  100. package/dist/neuralnetwork/nn.neuron.d.ts +8 -0
  101. package/dist/neuralnetwork/nn.neuron.js +10 -0
  102. package/dist/neuralnetwork/nn.neuron.js.map +1 -0
  103. package/dist/neuralnetwork/nn.synapse.d.ts +6 -0
  104. package/dist/neuralnetwork/nn.synapse.js +14 -0
  105. package/dist/neuralnetwork/nn.synapse.js.map +1 -0
  106. package/dist/neuralnetwork/nn.weights.d.ts +42 -0
  107. package/dist/neuralnetwork/nn.weights.js +120 -0
  108. package/dist/neuralnetwork/nn.weights.js.map +1 -0
  109. package/dist/neuralnetwork/snn/index.d.ts +3 -0
  110. package/dist/neuralnetwork/snn/index.js +4 -0
  111. package/dist/neuralnetwork/snn/index.js.map +1 -0
  112. package/dist/neuralnetwork/snn/spike.interfaces.d.ts +50 -0
  113. package/dist/neuralnetwork/snn/spike.interfaces.js +24 -0
  114. package/dist/neuralnetwork/snn/spike.interfaces.js.map +1 -0
  115. package/dist/neuralnetwork/snn/spike.runtime.d.ts +31 -0
  116. package/dist/neuralnetwork/snn/spike.runtime.js +90 -0
  117. package/dist/neuralnetwork/snn/spike.runtime.js.map +1 -0
  118. package/dist/neuralnetwork/snn/spike.stdp.d.ts +11 -0
  119. package/dist/neuralnetwork/snn/spike.stdp.js +33 -0
  120. package/dist/neuralnetwork/snn/spike.stdp.js.map +1 -0
  121. package/dist/types.d.ts +3 -0
  122. package/dist/types.js +2 -0
  123. package/dist/types.js.map +1 -0
  124. package/dist/utils/csv.d.ts +6 -0
  125. package/dist/utils/csv.js +36 -0
  126. package/dist/utils/csv.js.map +1 -0
  127. package/dist/utils/index.d.ts +1 -0
  128. package/dist/utils/index.js +2 -0
  129. package/dist/utils/index.js.map +1 -0
  130. package/package.json +44 -0
@@ -0,0 +1,43 @@
1
import { GraphNodeBuilder } from "../../../graph";
import { ILayer } from "../../nn.interfaces";
import { ILayerConnectionBuilder } from "../../nn.interfaces.builder";
import { IActivationFunction, IMlpGraph, IMlpNeuron } from "./mlp.interfaces";
/**
 * Fluent builder for a single MLP neuron. Extends the generic graph-node
 * builder with a bias and an activation function, both of which must be
 * set before build() is called.
 */
export declare class MlpNeuronBuilder extends GraphNodeBuilder {
    /** Bias added to the neuron's weighted input sum; required before build(). */
    _bias?: number;
    /** Activation applied to the biased sum; required before build(). */
    _activationFn?: IActivationFunction;
    /** Sets the neuron bias. Returns this builder for chaining. */
    withBias(bias: number): MlpNeuronBuilder;
    /** Sets the activation function. Returns this builder for chaining. */
    withActivationFn(fn: IActivationFunction): MlpNeuronBuilder;
    /** Builds the neuron; extra args are forwarded to the base builder. */
    build(...args: any[]): IMlpNeuron;
}
/** Builds a homogeneous layer of MLP neurons from a single neuron template. */
export declare class MlpLayerBuilder {
    /** Number of neurons the layer will contain; must be > 0 at build time. */
    _count: number;
    /** Template builder used to create every neuron in the layer. */
    _neuronBuilder?: MlpNeuronBuilder;
    /**
     * Configures the neuron template: pass either a pre-configured
     * MlpNeuronBuilder, or a bias (and optionally an activation function).
     */
    withNeuron(bias: number | MlpNeuronBuilder, fn?: IActivationFunction): MlpLayerBuilder;
    /** Sets how many neurons the layer will contain. */
    withCount(count: number): MlpLayerBuilder;
    /** Builds the layer; extra args are forwarded to each neuron build(). */
    build(...args: any[]): ILayer<IMlpNeuron>;
}
/**
 * Builder for a full multilayer perceptron graph: one input layer, any
 * number of hidden layers, one output layer, plus the inter-layer
 * connection builders that wire them together.
 */
export declare class PerceptronBuilder {
    _inputLayerBuilder: MlpLayerBuilder;
    _hiddenLayerBuilders: Array<MlpLayerBuilder>;
    _outputLayerBuilder: MlpLayerBuilder;
    /** Index cursor into _hiddenLayerBuilders. */
    _hiddenLayerCount: number;
    /** Total layers declared so far (input + hidden + output). */
    _layerCount: number;
    /** Connection builder used when withConnectionBuilder() receives none. */
    _defaultLayerConnBuilder: ILayerConnectionBuilder;
    /** Pending layer-to-layer connections, addressed by layer index. */
    _connectionBuilders: Array<{
        conn: ILayerConnectionBuilder;
        from: number;
        to: number;
    }>;
    /** Declares the input layer from a count+bias+activation or a prebuilt layer builder. */
    withInputLayer(count: number | MlpLayerBuilder, bias?: number, activation?: IActivationFunction): PerceptronBuilder;
    /**
     * Declares a hidden layer. Accepts a count (with bias/activation), a
     * prebuilt layer builder, or an array of per-layer config objects.
     */
    withHiddenLayer(count: number | MlpLayerBuilder | [{
        count: number;
        bias?: number;
        activation?: IActivationFunction;
        conn?: ILayerConnectionBuilder;
    }], bias?: number, activation?: IActivationFunction): PerceptronBuilder;
    /** Declares the output layer from a count+bias+activation or a prebuilt layer builder. */
    withOutputLayer(count: number | MlpLayerBuilder, bias?: number, activation?: IActivationFunction): PerceptronBuilder;
    /**
     * Queues a connection between two layers; when from/to are omitted it
     * targets the two most recently declared layers.
     */
    withConnectionBuilder(connBuilder?: ILayerConnectionBuilder, from?: number, to?: number): PerceptronBuilder;
    /** Sets the fallback connection builder used for unparameterized connections. */
    withDefaultConnectionBuilder(conn: ILayerConnectionBuilder): PerceptronBuilder;
    /** Builds all layers and connections into an MLP graph. */
    build(...args: any[]): IMlpGraph;
    /** Restores the builder to its initial empty state. */
    reset(): PerceptronBuilder;
}
@@ -0,0 +1,168 @@
1
+ import { GraphBuilder, GraphNodeBuilder } from "../../../graph";
2
+ import { LayerConnectionBuilder } from "../../nn.builders";
3
/**
 * Fluent builder for a single MLP neuron. Extends the generic graph-node
 * builder with a bias and an activation function; both are mandatory
 * before build() may be called.
 */
export class MlpNeuronBuilder extends GraphNodeBuilder {
    /** Sets the bias added to the neuron's weighted input sum. */
    withBias(bias) {
        this._bias = bias;
        return this;
    }

    /** Sets the activation function applied to the biased sum. */
    withActivationFn(fn) {
        this._activationFn = fn;
        return this;
    }

    /**
     * Builds the neuron via the base graph-node builder, then attaches the
     * configured bias and activation. Extra args go to the base build().
     * @throws Error when bias or activation was never supplied.
     */
    build(...args) {
        if (this._bias == undefined) throw new Error("Bias must be provided.");
        if (this._activationFn == undefined) throw new Error("Activation function must be provided.");
        return Object.assign(super.build(...args), {
            bias: this._bias,
            activationFn: this._activationFn,
        });
    }
}
25
/**
 * Builds a homogeneous layer of MLP neurons from a single neuron template.
 * Configure the template with withNeuron(), the size with withCount(),
 * then call build() to produce the neuron array.
 */
export class MlpLayerBuilder {
    constructor() {
        // Number of neurons to produce; must become positive before build().
        this._count = 0;
    }

    /**
     * Configures the neuron template. Either pass a pre-configured
     * MlpNeuronBuilder, or a bias plus an optional activation function,
     * which are applied to a lazily created template builder.
     */
    withNeuron(bias, fn) {
        if (bias instanceof MlpNeuronBuilder) {
            this._neuronBuilder = bias;
        } else {
            this._neuronBuilder = this._neuronBuilder ?? new MlpNeuronBuilder();
            this._neuronBuilder.withBias(bias);
            if (fn) {
                this._neuronBuilder.withActivationFn(fn);
            }
        }
        return this;
    }

    /** Sets how many neurons the layer will contain. */
    withCount(count) {
        this._count = count;
        return this;
    }

    /**
     * Builds the layer: one neuron per count, all produced by the same
     * template builder. Extra args are forwarded to each neuron build().
     * @throws Error when no template was configured or count is not positive.
     */
    build(...args) {
        if (!this._neuronBuilder) {
            throw new Error("Neuron builder is not defined. Please set it using withNeuron() method.");
        }
        if (this._count <= 0) {
            throw new Error("Neuron count must be greater than zero.");
        }
        return Array.from({ length: this._count }, () => this._neuronBuilder.build(...args));
    }
}
59
/**
 * Fluent builder for a complete multilayer perceptron graph: one input
 * layer, any number of hidden layers, one output layer, and the
 * layer-to-layer connection builders that wire them together.
 *
 * Bug fixes versus the previous revision:
 *  - withHiddenLayer() now advances _hiddenLayerCount, so multiple hidden
 *    layers append instead of all overwriting slot 0 (the array call form
 *    explicitly supports several hidden layers, which was broken).
 *  - reset() now also resets _layerCount, which previously stayed stale
 *    and corrupted the default from/to indices of later connections.
 */
export class PerceptronBuilder {
    constructor() {
        this._inputLayerBuilder = new MlpLayerBuilder();
        this._hiddenLayerBuilders = [];
        this._outputLayerBuilder = new MlpLayerBuilder();
        // Index of the next free hidden-layer slot in _hiddenLayerBuilders.
        this._hiddenLayerCount = 0;
        // Total layers declared so far (input + hidden + output).
        this._layerCount = 0;
        this._defaultLayerConnBuilder = new LayerConnectionBuilder();
        // Pending connections, addressed by layer index in build() order.
        this._connectionBuilders = [];
    }

    /**
     * Declares the input layer, either from a prebuilt MlpLayerBuilder or
     * from a neuron count plus bias and activation.
     * @throws Error when count is numeric but bias/activation is missing.
     */
    withInputLayer(count, bias, activation) {
        if (count instanceof MlpLayerBuilder) {
            this._inputLayerBuilder = count;
            this._layerCount++;
            return this;
        }
        if (bias == undefined || activation == undefined) {
            throw new Error("Bias and activation function must be provided when using count.");
        }
        this._inputLayerBuilder = this._inputLayerBuilder ?? new MlpLayerBuilder();
        this._inputLayerBuilder.withCount(count).withNeuron(bias, activation);
        this._layerCount++;
        return this;
    }

    /**
     * Declares a hidden layer. Accepts a prebuilt MlpLayerBuilder, an array
     * of per-layer config objects ({count, bias?, activation?, conn?}), or
     * a neuron count plus bias and activation.
     * @throws Error when count is numeric but bias/activation is missing.
     */
    withHiddenLayer(count, bias, activation) {
        if (count instanceof MlpLayerBuilder) {
            this._hiddenLayerBuilders[this._hiddenLayerCount] = count;
            // BUGFIX: advance the cursor so the next hidden layer appends
            // instead of overwriting this one.
            this._hiddenLayerCount++;
            this._layerCount++;
            return this;
        }
        if (Array.isArray(count)) {
            for (const layer of count) {
                this.withHiddenLayer(layer.count, layer.bias, layer.activation);
                if (layer.conn) {
                    this.withConnectionBuilder(layer.conn);
                }
            }
            return this;
        }
        if (bias == undefined || activation == undefined) {
            throw new Error("Bias and activation function must be provided when using count.");
        }
        this._hiddenLayerBuilders[this._hiddenLayerCount] = this._hiddenLayerBuilders[this._hiddenLayerCount] ?? new MlpLayerBuilder();
        this._hiddenLayerBuilders[this._hiddenLayerCount].withCount(count).withNeuron(bias, activation);
        // BUGFIX: each hidden layer gets its own slot (was never incremented).
        this._hiddenLayerCount++;
        this._layerCount++;
        return this;
    }

    /**
     * Declares the output layer (same call forms as withInputLayer).
     * @throws Error when count is numeric but bias/activation is missing.
     */
    withOutputLayer(count, bias, activation) {
        if (count instanceof MlpLayerBuilder) {
            this._outputLayerBuilder = count;
            this._layerCount++;
            return this;
        }
        if (bias == undefined || activation == undefined) {
            throw new Error("Bias and activation function must be provided when using count.");
        }
        this._outputLayerBuilder = this._outputLayerBuilder ?? new MlpLayerBuilder();
        this._outputLayerBuilder.withCount(count).withNeuron(bias, activation);
        this._layerCount++;
        return this;
    }

    /**
     * Queues a connection between two layers. When from/to are omitted, the
     * two most recently declared layers are connected. Connections whose
     * resolved indices are negative are silently ignored.
     */
    withConnectionBuilder(connBuilder, from, to) {
        const b = connBuilder ?? this._defaultLayerConnBuilder ?? new LayerConnectionBuilder();
        const f = from != undefined ? from : this._layerCount - 2;
        const t = to != undefined ? to : this._layerCount - 1;
        if (f >= 0 && t >= 0) {
            this._connectionBuilders.push({ conn: b, from: f, to: t });
        }
        return this;
    }

    /** Sets the connection builder used when withConnectionBuilder() gets none. */
    withDefaultConnectionBuilder(conn) {
        this._defaultLayerConnBuilder = conn;
        return this;
    }

    /**
     * Builds all layers (input, hiddens, output — in that order), wires the
     * queued connections, and assembles everything into a graph.
     * Connections whose endpoints fall outside the built layers are skipped.
     */
    build(...args) {
        const builder = new GraphBuilder();
        const layers = [];
        layers.push(this._inputLayerBuilder.build(...args));
        layers.push(...this._hiddenLayerBuilders.map((b) => b.build(...args)));
        layers.push(this._outputLayerBuilder.build(...args));
        for (const { conn, from, to } of this._connectionBuilders) {
            // Guard against stale or out-of-range layer indices.
            if (from < 0 || from >= layers.length || to < 0 || to >= layers.length) {
                continue;
            }
            const links = conn.build(layers[from], layers[to]);
            if (links) {
                builder.withLinks(links);
            }
        }
        builder.withNodes(layers.flat());
        return builder.build();
    }

    /** Restores the builder to its initial, empty state. */
    reset() {
        this._inputLayerBuilder = new MlpLayerBuilder();
        this._hiddenLayerBuilders = [];
        this._outputLayerBuilder = new MlpLayerBuilder();
        this._hiddenLayerCount = 0;
        // BUGFIX: _layerCount was previously left stale after reset().
        this._layerCount = 0;
        this._defaultLayerConnBuilder = new LayerConnectionBuilder();
        this._connectionBuilders = [];
        return this;
    }
}
168
+ //# sourceMappingURL=mlp.builder.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mlp.builder.js","sourceRoot":"","sources":["../../../../src/neuralnetwork/ann/mlp/mlp.builder.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,gBAAgB,CAAC;AAChE,OAAO,EAAE,sBAAsB,EAAE,MAAM,mBAAmB,CAAC;AAM3D,MAAM,OAAO,gBAAiB,SAAQ,gBAAgB;IAI3C,QAAQ,CAAC,IAAY;QACxB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,OAAO,IAAI,CAAC;IAChB,CAAC;IACM,gBAAgB,CAAC,EAAuB;QAC3C,IAAI,CAAC,aAAa,GAAG,EAAE,CAAC;QACxB,OAAO,IAAI,CAAC;IAChB,CAAC;IACM,KAAK,CAAC,GAAG,IAAW;QACvB,IAAI,IAAI,CAAC,KAAK,IAAI,SAAS,EAAE,CAAC;YAC1B,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAC;QAC9C,CAAC;QACD,IAAI,IAAI,CAAC,aAAa,IAAI,SAAS,EAAE,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;QAC7D,CAAC;QACD,MAAM,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,IAAI,CAAe,CAAC;QAClD,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC;QACzB,MAAM,CAAC,YAAY,GAAG,IAAI,CAAC,aAAa,CAAC;QACzC,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AAED,MAAM,OAAO,eAAe;IAA5B;QACI,WAAM,GAAW,CAAC,CAAC;IAkCvB,CAAC;IA/BU,UAAU,CAAC,IAA+B,EAAE,EAAwB;QACvE,IAAI,IAAI,YAAY,gBAAgB,EAAE,CAAC;YACnC,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC;YAC3B,OAAO,IAAI,CAAC;QAChB,CAAC;QACD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,cAAc,IAAI,IAAI,gBAAgB,EAAE,CAAC;QACpE,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnC,IAAI,EAAE,EAAE,CAAC;YACL,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,EAAE,CAAC,CAAC;QAC7C,CAAC;QACD,OAAO,IAAI,CAAC;IAChB,CAAC;IAEM,SAAS,CAAC,KAAa;QAC1B,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;QACpB,OAAO,IAAI,CAAC;IAChB,CAAC;IAEM,KAAK,CAAC,GAAG,IAAW;QACvB,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC;YACvB,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;QAC/F,CAAC;QACD,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;YACnB,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;QAC/D,CAAC;QACD,MAAM,OAAO,GAAuB,EAAE,CAAC;QACvC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACnC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;QACrD,CAAC;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;CACJ;AAED,MAAM,OAAO,iBAAiB;IAA9B;QACI,uBAAkB,GAAoB,IAAI,eAAe,EAAE,CAAC;QAC5D,yBAAoB,GAA2B,EAAE,CAAC;QAClD,wBAAmB,GAAoB,IAAI,eAAe,EAAE,CAAC;QA
C7D,sBAAiB,GAAW,CAAC,CAAC;QAC9B,gBAAW,GAAW,CAAC,CAAC;QACxB,6BAAwB,GAA4B,IAAI,sBAAsB,EAAE,CAAC;QACjF,wBAAmB,GAAuE,EAAE,CAAC;IAiHjG,CAAC;IA/GU,cAAc,CAAC,KAA+B,EAAE,IAAa,EAAE,UAAgC;QAClG,IAAI,KAAK,YAAY,eAAe,EAAE,CAAC;YACnC,IAAI,CAAC,kBAAkB,GAAG,KAAK,CAAC;YAChC,IAAI,CAAC,WAAW,EAAE,CAAC;YACnB,OAAO,IAAI,CAAC;QAChB,CAAC;QACD,IAAI,IAAI,IAAI,SAAS,IAAI,UAAU,IAAI,SAAS,EAAE,CAAC;YAC/C,MAAM,IAAI,KAAK,CAAC,iEAAiE,CAAC,CAAC;QACvF,CAAC;QACD,IAAI,CAAC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,IAAI,IAAI,eAAe,EAAE,CAAC;QAC3E,IAAI,CAAC,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QACtE,IAAI,CAAC,WAAW,EAAE,CAAC;QACnB,OAAO,IAAI,CAAC;IAChB,CAAC;IAEM,eAAe,CAClB,KAAsI,EACtI,IAAa,EACb,UAAgC;QAEhC,IAAI,KAAK,YAAY,eAAe,EAAE,CAAC;YACnC,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,iBAAiB,CAAC,GAAG,KAAK,CAAC;YAC1D,IAAI,CAAC,WAAW,EAAE,CAAC;YACnB,OAAO,IAAI,CAAC;QAChB,CAAC;QACD,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;YACvB,KAAK,MAAM,KAAK,IAAI,KAAK,EAAE,CAAC;gBACxB,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;gBAChE,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;oBACb,IAAI,CAAC,qBAAqB,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBAC3C,CAAC;YACL,CAAC;YACD,OAAO,IAAI,CAAC;QAChB,CAAC;QACD,IAAI,IAAI,IAAI,SAAS,IAAI,UAAU,IAAI,SAAS,EAAE,CAAC;YAC/C,MAAM,IAAI,KAAK,CAAC,iEAAiE,CAAC,CAAC;QACvF,CAAC;QACD,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,iBAAiB,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,iBAAiB,CAAC,IAAI,IAAI,eAAe,EAAE,CAAC;QAC/H,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAChG,IAAI,CAAC,WAAW,EAAE,CAAC;QACnB,OAAO,IAAI,CAAC;IAChB,CAAC;IAEM,eAAe,CAAC,KAA+B,EAAE,IAAa,EAAE,UAAgC;QACnG,IAAI,KAAK,YAAY,eAAe,EAAE,CAAC;YACnC,IAAI,CAAC,mBAAmB,GAAG,KAAK,CAAC;YACjC,IAAI,CAAC,WAAW,EAAE,CAAC;YACnB,OAAO,IAAI,CAAC;QAChB,CAAC;QACD,IAAI,IAAI,IAAI,SAAS,IAAI,UAAU,IAAI,SAAS,EAAE,CAAC;YAC/C,MAAM,IAAI,KAAK,CAAC,iEAAiE,CAAC,CAAC;QACvF,CAAC;QACD,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,IAAI,IAAI,eAAe,EAAE,CAAC;QAC7E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,UA
AU,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QACvE,IAAI,CAAC,WAAW,EAAE,CAAC;QACnB,OAAO,IAAI,CAAC;IAChB,CAAC;IAEM,qBAAqB,CAAC,WAAqC,EAAE,IAAa,EAAE,EAAW;QAC1F,MAAM,CAAC,GAAG,WAAW,IAAI,IAAI,CAAC,wBAAwB,IAAI,IAAI,sBAAsB,EAAE,CAAC;QACvF,MAAM,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;QAC1D,MAAM,CAAC,GAAG,EAAE,IAAI,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;QACtD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YACnB,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC;QAC/D,CAAC;QACD,OAAO,IAAI,CAAC;IAChB,CAAC;IAEM,4BAA4B,CAAC,IAA6B;QAC7D,IAAI,CAAC,wBAAwB,GAAG,IAAI,CAAC;QACrC,OAAO,IAAI,CAAC;IAChB,CAAC;IAEM,KAAK,CAAC,GAAG,IAAW;QACvB,MAAM,OAAO,GAAG,IAAI,YAAY,EAAE,CAAC;QAEnC,MAAM,MAAM,GAAG,EAAE,CAAC;QAClB,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;QACpD,MAAM,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;QACvE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;QAErD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACxD,IAAI,UAAU,GAAG,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;YAClD,IAAI,UAAU,GAAG,CAAC,IAAI,UAAU,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;gBAChD,SAAS;YACb,CAAC;YACD,MAAM,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC;YAC7B,UAAU,GAAG,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAC5C,IAAI,UAAU,GAAG,CAAC,IAAI,UAAU,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;gBAChD,SAAS;YACb,CAAC;YACD,MAAM,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC;YAC7B,MAAM,KAAK,GAAG,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YAC3D,IAAI,KAAK;gBAAE,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;QACxC,CAAC;QAED,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC;QAEjC,MAAM,CAAC,GAAG,OAAO,CAAC,KAAK,EAAe,CAAC;QACvC,OAAO,CAAC,CAAC;IACb,CAAC;IAEM,KAAK;QACR,IAAI,CAAC,kBAAkB,GAAG,IAAI,eAAe,EAAE,CAAC;QAChD,IAAI,CAAC,oBAAoB,G
AAG,EAAE,CAAC;QAC/B,IAAI,CAAC,mBAAmB,GAAG,IAAI,eAAe,EAAE,CAAC;QACjD,IAAI,CAAC,iBAAiB,GAAG,CAAC,CAAC;QAC3B,IAAI,CAAC,wBAAwB,GAAG,IAAI,sBAAsB,EAAE,CAAC;QAC7D,IAAI,CAAC,mBAAmB,GAAG,EAAE,CAAC;QAC9B,OAAO,IAAI,CAAC;IAChB,CAAC;CACJ","sourcesContent":["import { GraphBuilder, GraphNodeBuilder } from \"../../../graph\";\r\nimport { LayerConnectionBuilder } from \"../../nn.builders\";\r\nimport { ILayer } from \"../../nn.interfaces\";\r\nimport { ILayerConnectionBuilder } from \"../../nn.interfaces.builder\";\r\n\r\nimport { IActivationFunction, IMlpGraph, IMlpNeuron } from \"./mlp.interfaces\";\r\n\r\nexport class MlpNeuronBuilder extends GraphNodeBuilder {\r\n _bias?: number;\r\n _activationFn?: IActivationFunction;\r\n\r\n public withBias(bias: number): MlpNeuronBuilder {\r\n this._bias = bias;\r\n return this;\r\n }\r\n public withActivationFn(fn: IActivationFunction): MlpNeuronBuilder {\r\n this._activationFn = fn;\r\n return this;\r\n }\r\n public build(...args: any[]): IMlpNeuron {\r\n if (this._bias == undefined) {\r\n throw new Error(\"Bias must be provided.\");\r\n }\r\n if (this._activationFn == undefined) {\r\n throw new Error(\"Activation function must be provided.\");\r\n }\r\n const neuron = super.build(...args) as IMlpNeuron;\r\n neuron.bias = this._bias;\r\n neuron.activationFn = this._activationFn;\r\n return neuron;\r\n }\r\n}\r\n\r\nexport class MlpLayerBuilder {\r\n _count: number = 0;\r\n _neuronBuilder?: MlpNeuronBuilder;\r\n\r\n public withNeuron(bias: number | MlpNeuronBuilder, fn?: IActivationFunction): MlpLayerBuilder {\r\n if (bias instanceof MlpNeuronBuilder) {\r\n this._neuronBuilder = bias;\r\n return this;\r\n }\r\n this._neuronBuilder = this._neuronBuilder ?? 
new MlpNeuronBuilder();\r\n this._neuronBuilder.withBias(bias);\r\n if (fn) {\r\n this._neuronBuilder.withActivationFn(fn);\r\n }\r\n return this;\r\n }\r\n\r\n public withCount(count: number): MlpLayerBuilder {\r\n this._count = count;\r\n return this;\r\n }\r\n\r\n public build(...args: any[]): ILayer<IMlpNeuron> {\r\n if (!this._neuronBuilder) {\r\n throw new Error(\"Neuron builder is not defined. Please set it using withNeuron() method.\");\r\n }\r\n if (this._count <= 0) {\r\n throw new Error(\"Neuron count must be greater than zero.\");\r\n }\r\n const neurons: ILayer<IMlpNeuron> = [];\r\n for (let i = 0; i < this._count; i++) {\r\n neurons.push(this._neuronBuilder.build(...args));\r\n }\r\n return neurons;\r\n }\r\n}\r\n\r\nexport class PerceptronBuilder {\r\n _inputLayerBuilder: MlpLayerBuilder = new MlpLayerBuilder();\r\n _hiddenLayerBuilders: Array<MlpLayerBuilder> = [];\r\n _outputLayerBuilder: MlpLayerBuilder = new MlpLayerBuilder();\r\n _hiddenLayerCount: number = 0;\r\n _layerCount: number = 0;\r\n _defaultLayerConnBuilder: ILayerConnectionBuilder = new LayerConnectionBuilder();\r\n _connectionBuilders: Array<{ conn: ILayerConnectionBuilder; from: number; to: number }> = [];\r\n\r\n public withInputLayer(count: number | MlpLayerBuilder, bias?: number, activation?: IActivationFunction): PerceptronBuilder {\r\n if (count instanceof MlpLayerBuilder) {\r\n this._inputLayerBuilder = count;\r\n this._layerCount++;\r\n return this;\r\n }\r\n if (bias == undefined || activation == undefined) {\r\n throw new Error(\"Bias and activation function must be provided when using count.\");\r\n }\r\n this._inputLayerBuilder = this._inputLayerBuilder ?? 
new MlpLayerBuilder();\r\n this._inputLayerBuilder.withCount(count).withNeuron(bias, activation);\r\n this._layerCount++;\r\n return this;\r\n }\r\n\r\n public withHiddenLayer(\r\n count: number | MlpLayerBuilder | [{ count: number; bias?: number; activation?: IActivationFunction; conn?: ILayerConnectionBuilder }],\r\n bias?: number,\r\n activation?: IActivationFunction\r\n ): PerceptronBuilder {\r\n if (count instanceof MlpLayerBuilder) {\r\n this._hiddenLayerBuilders[this._hiddenLayerCount] = count;\r\n this._layerCount++;\r\n return this;\r\n }\r\n if (Array.isArray(count)) {\r\n for (const layer of count) {\r\n this.withHiddenLayer(layer.count, layer.bias, layer.activation);\r\n if (layer.conn) {\r\n this.withConnectionBuilder(layer.conn);\r\n }\r\n }\r\n return this;\r\n }\r\n if (bias == undefined || activation == undefined) {\r\n throw new Error(\"Bias and activation function must be provided when using count.\");\r\n }\r\n this._hiddenLayerBuilders[this._hiddenLayerCount] = this._hiddenLayerBuilders[this._hiddenLayerCount] ?? new MlpLayerBuilder();\r\n this._hiddenLayerBuilders[this._hiddenLayerCount].withCount(count).withNeuron(bias, activation);\r\n this._layerCount++;\r\n return this;\r\n }\r\n\r\n public withOutputLayer(count: number | MlpLayerBuilder, bias?: number, activation?: IActivationFunction): PerceptronBuilder {\r\n if (count instanceof MlpLayerBuilder) {\r\n this._outputLayerBuilder = count;\r\n this._layerCount++;\r\n return this;\r\n }\r\n if (bias == undefined || activation == undefined) {\r\n throw new Error(\"Bias and activation function must be provided when using count.\");\r\n }\r\n this._outputLayerBuilder = this._outputLayerBuilder ?? new MlpLayerBuilder();\r\n this._outputLayerBuilder.withCount(count).withNeuron(bias, activation);\r\n this._layerCount++;\r\n return this;\r\n }\r\n\r\n public withConnectionBuilder(connBuilder?: ILayerConnectionBuilder, from?: number, to?: number): PerceptronBuilder {\r\n const b = connBuilder ?? 
this._defaultLayerConnBuilder ?? new LayerConnectionBuilder();\r\n const f = from != undefined ? from : this._layerCount - 2;\r\n const t = to != undefined ? to : this._layerCount - 1;\r\n if (f >= 0 && t >= 0) {\r\n this._connectionBuilders.push({ conn: b, from: f, to: t });\r\n }\r\n return this;\r\n }\r\n\r\n public withDefaultConnectionBuilder(conn: ILayerConnectionBuilder): PerceptronBuilder {\r\n this._defaultLayerConnBuilder = conn;\r\n return this;\r\n }\r\n\r\n public build(...args: any[]): IMlpGraph {\r\n const builder = new GraphBuilder();\r\n\r\n const layers = [];\r\n layers.push(this._inputLayerBuilder.build(...args));\r\n layers.push(...this._hiddenLayerBuilders.map((b) => b.build(...args)));\r\n layers.push(this._outputLayerBuilder.build(...args));\r\n\r\n for (let i = 0; i != this._connectionBuilders.length; i++) {\r\n let layerIndex = this._connectionBuilders[i].from;\r\n if (layerIndex < 0 || layerIndex >= layers.length) {\r\n continue;\r\n }\r\n const a = layers[layerIndex];\r\n layerIndex = this._connectionBuilders[i].to;\r\n if (layerIndex < 0 || layerIndex >= layers.length) {\r\n continue;\r\n }\r\n const b = layers[layerIndex];\r\n const links = this._connectionBuilders[i].conn.build(a, b);\r\n if (links) builder.withLinks(links);\r\n }\r\n\r\n builder.withNodes(layers.flat());\r\n\r\n const g = builder.build() as IMlpGraph;\r\n return g;\r\n }\r\n\r\n public reset(): PerceptronBuilder {\r\n this._inputLayerBuilder = new MlpLayerBuilder();\r\n this._hiddenLayerBuilders = [];\r\n this._outputLayerBuilder = new MlpLayerBuilder();\r\n this._hiddenLayerCount = 0;\r\n this._defaultLayerConnBuilder = new LayerConnectionBuilder();\r\n this._connectionBuilders = [];\r\n return this;\r\n }\r\n}\r\n"]}
@@ -0,0 +1,7 @@
1
import { ICartesian } from "../../../geometry";
import { Graph } from "../../../graph";
import { Nullable } from "../../../types";
import { IMlpGraph, IMlpNeuron, IMlpSynapse } from "./mlp.interfaces";
/**
 * Graph specialization for multilayer perceptrons: nodes are MLP neurons
 * and links are MLP synapses. The constructor forwards all arguments
 * unchanged to the generic Graph base class.
 *
 * Parameter notes (from the forwarding implementation):
 *  - nodes/links: all neurons and synapses of the network.
 *  - inputs/outputs/hiddens: optional precomputed neuron subsets.
 *  - onsc/opsc: presumably outgoing/incoming synapse caches — TODO confirm
 *    against the Graph base-class contract.
 *  - position: optional cartesian position of the graph.
 */
export declare class MlpGraph extends Graph<IMlpNeuron, IMlpSynapse> implements IMlpGraph {
    constructor(nodes?: IMlpNeuron[], links?: IMlpSynapse[], inputs?: Nullable<IMlpNeuron[]>, outputs?: Nullable<IMlpNeuron[]>, hiddens?: Nullable<IMlpNeuron[]>, onsc?: Nullable<IMlpSynapse[]>, opsc?: Nullable<IMlpSynapse[]>, position?: ICartesian);
}
@@ -0,0 +1,7 @@
1
+ import { Graph } from "../../../graph";
2
/**
 * Graph specialization for multilayer perceptrons: nodes are MLP neurons
 * and links are MLP synapses. Every constructor argument is forwarded
 * unchanged to the generic Graph base class; array parameters default to
 * empty arrays and the optional subsets/caches default to null.
 */
export class MlpGraph extends Graph {
    constructor(
        nodes = [],
        links = [],
        inputs = null,
        outputs = null,
        hiddens = null,
        onsc = null,
        opsc = null,
        position
    ) {
        super(nodes, links, inputs, outputs, hiddens, onsc, opsc, position);
    }
}
7
+ //# sourceMappingURL=mlp.graph.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mlp.graph.js","sourceRoot":"","sources":["../../../../src/neuralnetwork/ann/mlp/mlp.graph.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAIvC,MAAM,OAAO,QAAS,SAAQ,KAA8B;IACxD,YACI,QAAsB,EAAE,EACxB,QAAuB,EAAE,EACzB,SAAiC,IAAI,EACrC,UAAkC,IAAI,EACtC,UAAkC,IAAI,EACtC,OAAgC,IAAI,EACpC,OAAgC,IAAI,EACpC,QAAqB;QAErB,KAAK,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;IACxE,CAAC;CACJ","sourcesContent":["import { ICartesian } from \"../../../geometry\";\r\nimport { Graph } from \"../../../graph\";\r\nimport { Nullable } from \"../../../types\";\r\nimport { IMlpGraph, IMlpNeuron, IMlpSynapse } from \"./mlp.interfaces\";\r\n\r\nexport class MlpGraph extends Graph<IMlpNeuron, IMlpSynapse> implements IMlpGraph {\r\n public constructor(\r\n nodes: IMlpNeuron[] = [],\r\n links: IMlpSynapse[] = [],\r\n inputs: Nullable<IMlpNeuron[]> = null,\r\n outputs: Nullable<IMlpNeuron[]> = null,\r\n hiddens: Nullable<IMlpNeuron[]> = null,\r\n onsc: Nullable<IMlpSynapse[]> = null,\r\n opsc: Nullable<IMlpSynapse[]> = null,\r\n position?: ICartesian\r\n ) {\r\n super(nodes, links, inputs, outputs, hiddens, onsc, opsc, position);\r\n }\r\n}\r\n"]}
@@ -0,0 +1,13 @@
1
import { IMlpGraph, IActivationFunction } from "./mlp.interfaces";
/**
 * Forward-pass (inference-only) runtime for an MLP graph: propagates
 * activations from the graph's input neurons to its output neurons.
 */
export declare class MLPInferenceRuntime {
    /** Graph the runtime operates on; never reassigned. */
    readonly graph: IMlpGraph;
    /** Fallback activation for neurons that carry no activationFn of their own. */
    mainActivation: IActivationFunction;
    constructor(graph: IMlpGraph, mainActivation?: IActivationFunction);
    /**
     * Runs inference on the MLP graph given an array of input values.
     * The number of input values must match the number of input neurons.
     */
    run(inputValues: number[]): number[];
    /** Resets the per-neuron inference context. */
    clearContext(): void;
    /** Presumably discards the per-neuron inference context entirely — implementation not visible here. */
    deleteContext(): void;
}
@@ -0,0 +1,63 @@
1
+ import { ActivationFunctions } from "./mlp.activation";
2
+ import { MLPRuntimeUtils } from "./mlp.runtime.utils";
3
/**
 * Forward-pass (inference) engine for an MLP graph.
 *
 * Neurons carry their transient state in `neuron.bag` (sum, activation,
 * remainingInputs, totalInputs); this runtime initializes that context,
 * propagates activations from the input neurons through a ready-queue,
 * and returns the output-layer activations.
 */
export class MLPInferenceRuntime {
    /**
     * @param graph The MLP graph to run inference on.
     * @param mainActivation Fallback activation for neurons that do not
     *        define their own `activationFn` (defaults to ReLU).
     */
    constructor(graph, mainActivation = ActivationFunctions.relu) {
        this.graph = graph;
        this.mainActivation = mainActivation;
    }
    /**
     * Runs inference on the MLP graph given an array of input values.
     * The number of input values must match the number of input neurons.
     * @param inputValues One value per input neuron, in input-layer order.
     * @returns The activation of each output neuron, in output-layer order.
     * @throws Error when `inputValues.length` differs from the input-neuron count.
     */
    run(inputValues) {
        if (inputValues.length !== this.graph.inputs.length) {
            throw new Error(`Input count mismatch: expected ${this.graph.inputs.length}, got ${inputValues.length}`);
        }
        const ready = [];
        // Initialize context for all neurons
        for (const neuron of this.graph.nodes) {
            MLPRuntimeUtils.resetInferenceContext(neuron);
        }
        // Load input values and mark input neurons as ready
        for (let i = 0; i < inputValues.length; i++) {
            const neuron = this.graph.inputs[i];
            const ctx = neuron.bag;
            ctx.sum = inputValues[i];
            ctx.activation = inputValues[i];
            ctx.remainingInputs = 0;
            ready.push(neuron);
        }
        // Forward pass. `head` walks the queue in place instead of calling
        // Array#shift(), which re-indexes the array on every dequeue and
        // made this loop accidentally O(n^2) in the number of neurons.
        for (let head = 0; head < ready.length; head++) {
            const source = ready[head];
            const sourceCtx = source.bag;
            const outputs = source.onsc() ?? [];
            for (const syn of outputs) {
                const target = syn.ofin;
                const targetCtx = target.bag;
                targetCtx.sum += sourceCtx.activation * syn.weight;
                targetCtx.remainingInputs--;
                // A neuron fires only once all of its incoming values have arrived.
                if (targetCtx.remainingInputs === 0) {
                    targetCtx.sum += target.bias;
                    const afn = target.activationFn ?? this.mainActivation;
                    targetCtx.activation = afn.fn(targetCtx.sum);
                    ready.push(target);
                }
            }
        }
        // Return output activations
        return this.graph.outputs.map((n) => n.bag.activation);
    }
    /** Resets the inference context (`bag`) of every neuron in the graph. */
    clearContext() {
        for (const neuron of this.graph.nodes) {
            MLPRuntimeUtils.resetInferenceContext(neuron);
        }
    }
    /** Drops the per-neuron context entirely so it can be garbage-collected. */
    deleteContext() {
        for (const neuron of this.graph.nodes) {
            neuron.bag = undefined;
        }
    }
}
63
+ //# sourceMappingURL=mlp.inference.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mlp.inference.js","sourceRoot":"","sources":["../../../../src/neuralnetwork/ann/mlp/mlp.inference.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,MAAM,kBAAkB,CAAC;AAEvD,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AAEtD,MAAM,OAAO,mBAAmB;IAC5B,YAAmC,KAAgB,EAAS,iBAAsC,mBAAmB,CAAC,IAAI;QAAvF,UAAK,GAAL,KAAK,CAAW;QAAS,mBAAc,GAAd,cAAc,CAAgD;IAAG,CAAC;IAE9H;;;OAGG;IACH,GAAG,CAAC,WAAqB;QACrB,IAAI,WAAW,CAAC,MAAM,KAAK,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;YAClD,MAAM,IAAI,KAAK,CAAC,kCAAkC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,SAAS,WAAW,CAAC,MAAM,EAAE,CAAC,CAAC;QAC7G,CAAC;QAED,MAAM,KAAK,GAAiB,EAAE,CAAC;QAE/B,qCAAqC;QACrC,KAAK,MAAM,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC;YACpC,eAAe,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC;QAClD,CAAC;QAED,sCAAsC;QACtC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YAC1C,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;YACpC,MAAM,GAAG,GAAG,MAAM,CAAC,GAA8B,CAAC;YAElD,GAAG,CAAC,GAAG,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;YACzB,GAAG,CAAC,UAAU,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;YAChC,GAAG,CAAC,eAAe,GAAG,CAAC,CAAC;YACxB,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACvB,CAAC;QAED,uBAAuB;QACvB,OAAO,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACtB,MAAM,MAAM,GAAG,KAAK,CAAC,KAAK,EAAG,CAAC;YAC9B,MAAM,SAAS,GAAG,MAAM,CAAC,GAA8B,CAAC;YAExD,MAAM,OAAO,GAAG,MAAM,CAAC,IAAI,EAAe,IAAI,EAAE,CAAC;YACjD,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;gBACxB,MAAM,MAAM,GAAG,GAAG,CAAC,IAAkB,CAAC;gBACtC,MAAM,SAAS,GAAG,MAAM,CAAC,GAA8B,CAAC;gBAExD,SAAS,CAAC,GAAG,IAAI,SAAS,CAAC,UAAU,GAAG,GAAG,CAAC,MAAM,CAAC;gBACnD,SAAS,CAAC,eAAe,EAAE,CAAC;gBAE5B,IAAI,SAAS,CAAC,eAAe,KAAK,CAAC,EAAE,CAAC;oBAClC,SAAS,CAAC,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC;oBAC7B,MAAM,GAAG,GAAG,MAAM,CAAC,YAAY,IAAI,IAAI,CAAC,cAAc,CAAC;oBACvD,SAAS,CAAC,UAAU,GAAG,GAAG,CAAC,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;oBAC7C,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;gBACvB,CAAC;YACL,CAAC;QACL,CAAC;QAED,4BAA4B;QAC5B,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAAC,CAAC,GAA+B,CAAC,UAA
U,CAAC,CAAC;QAC7F,OAAO,OAAO,CAAC;IACnB,CAAC;IAEM,YAAY;QACf,KAAK,MAAM,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC;YACpC,eAAe,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC;QAClD,CAAC;IACL,CAAC;IACM,aAAa;QAChB,KAAK,MAAM,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC;YACpC,MAAM,CAAC,GAAG,GAAG,SAAS,CAAC;QAC3B,CAAC;IACL,CAAC;CACJ","sourcesContent":["import { ActivationFunctions } from \"./mlp.activation\";\r\nimport { IMlpGraph, IMlpNeuron, IMlpSynapse, IInferenceNeuronContext, IActivationFunction } from \"./mlp.interfaces\";\r\nimport { MLPRuntimeUtils } from \"./mlp.runtime.utils\";\r\n\r\nexport class MLPInferenceRuntime {\r\n public constructor(public readonly graph: IMlpGraph, public mainActivation: IActivationFunction = ActivationFunctions.relu) {}\r\n\r\n /**\r\n * Runs inference on the MLP graph given an array of input values.\r\n * The number of input values must match the number of input neurons.\r\n */\r\n run(inputValues: number[]): number[] {\r\n if (inputValues.length !== this.graph.inputs.length) {\r\n throw new Error(`Input count mismatch: expected ${this.graph.inputs.length}, got ${inputValues.length}`);\r\n }\r\n\r\n const ready: IMlpNeuron[] = [];\r\n\r\n // Initialize context for all neurons\r\n for (const neuron of this.graph.nodes) {\r\n MLPRuntimeUtils.resetInferenceContext(neuron);\r\n }\r\n\r\n // Load input values and mark as ready\r\n for (let i = 0; i < inputValues.length; i++) {\r\n const neuron = this.graph.inputs[i];\r\n const ctx = neuron.bag as IInferenceNeuronContext;\r\n\r\n ctx.sum = inputValues[i];\r\n ctx.activation = inputValues[i];\r\n ctx.remainingInputs = 0;\r\n ready.push(neuron);\r\n }\r\n\r\n // Process forward pass\r\n while (ready.length > 0) {\r\n const source = ready.shift()!;\r\n const sourceCtx = source.bag as IInferenceNeuronContext;\r\n\r\n const outputs = source.onsc<IMlpSynapse>() ?? 
[];\r\n for (const syn of outputs) {\r\n const target = syn.ofin as IMlpNeuron;\r\n const targetCtx = target.bag as IInferenceNeuronContext;\r\n\r\n targetCtx.sum += sourceCtx.activation * syn.weight;\r\n targetCtx.remainingInputs--;\r\n\r\n if (targetCtx.remainingInputs === 0) {\r\n targetCtx.sum += target.bias;\r\n const afn = target.activationFn ?? this.mainActivation;\r\n targetCtx.activation = afn.fn(targetCtx.sum);\r\n ready.push(target);\r\n }\r\n }\r\n }\r\n\r\n // Return output activations\r\n const results = this.graph.outputs.map((n) => (n.bag as IInferenceNeuronContext).activation);\r\n return results;\r\n }\r\n\r\n public clearContext() {\r\n for (const neuron of this.graph.nodes) {\r\n MLPRuntimeUtils.resetInferenceContext(neuron);\r\n }\r\n }\r\n public deleteContext() {\r\n for (const neuron of this.graph.nodes) {\r\n neuron.bag = undefined;\r\n }\r\n }\r\n}\r\n"]}
@@ -0,0 +1,21 @@
1
+ import { IGraph } from "../../../graph";
2
+ import { INeuron, ISynapse } from "../../nn.interfaces";
3
/**
 * An activation function together with its derivative.
 * The derivative is expected to receive the activation output f(x),
 * not the pre-activation input x.
 */
export interface IActivationFunction {
    fn: (x: number) => number;
    derivative: (y: number) => number;
}
/** Transient per-neuron state used during a forward (inference) pass. */
export interface IInferenceNeuronContext {
    /** Weighted input sum accumulated so far (bias added once all inputs arrive). */
    sum: number;
    /** Result of the activation function applied to `sum`. */
    activation: number;
    /** Incoming synapses still waiting to deliver a value. */
    remainingInputs: number;
    /** Total number of incoming synapses. */
    totalInputs: number;
}
/**
 * A neuron in a Multi-Layer Perceptron (MLP).
 * It exposes its own state directly and includes a configurable bias.
 */
export interface IMlpNeuron extends INeuron {
    /** Bias added before applying the activation function. */
    bias: number;
    /** Optional activation function (default = ReLU). */
    activationFn?: IActivationFunction;
}
/** Type guard to check whether an object is an IMlpNeuron. */
export declare function isMlpNeuron(obj: unknown): obj is IMlpNeuron;
/** A synapse (connection) in an MLP network. */
export interface IMlpSynapse extends ISynapse {
}
/** A complete MLP graph composed of neurons and synapses. */
export interface IMlpGraph extends IGraph<IMlpNeuron, IMlpSynapse> {
}
@@ -0,0 +1,7 @@
1
/// <summary>
/// Type guard to check whether an object is an IMlpNeuron.
/// Only the required `bias` member is checked: `activationFn` is declared
/// optional on IMlpNeuron, so a neuron that never set that key must not be
/// rejected (the previous `"activationFn" in obj` check did exactly that).
/// </summary>
export function isMlpNeuron(obj) {
    return typeof obj === "object" && obj !== null && "bias" in obj;
}
7
+ //# sourceMappingURL=mlp.interfaces.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mlp.interfaces.js","sourceRoot":"","sources":["../../../../src/neuralnetwork/ann/mlp/mlp.interfaces.ts"],"names":[],"mappings":"AA8BA,aAAa;AACb,0DAA0D;AAC1D,cAAc;AACd,MAAM,UAAU,WAAW,CAAC,GAAY;IACpC,OAAO,OAAO,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK,IAAI,IAAI,MAAM,IAAI,GAAG,IAAI,cAAc,IAAI,GAAG,CAAC;AAC7F,CAAC","sourcesContent":["import { IGraph } from \"../../../graph\";\r\nimport { INeuron, ISynapse } from \"../../nn.interfaces\";\r\n\r\n/// <summary>\r\n/// Represents an activation function and its derivative.\r\n/// The derivative is expected to receive the activation output (f(x)), not the pre-activation input (x).\r\n/// </summary>\r\nexport interface IActivationFunction {\r\n fn: (x: number) => number;\r\n derivative: (y: number) => number;\r\n}\r\n\r\nexport interface IInferenceNeuronContext {\r\n sum: number;\r\n activation: number;\r\n remainingInputs: number;\r\n totalInputs: number;\r\n}\r\n\r\n/// <summary>\r\n/// Represents a neuron in a Multi-Layer Perceptron (MLP).\r\n/// It exposes its own state directly and includes a configurable bias.\r\n/// </summary>\r\nexport interface IMlpNeuron extends INeuron {\r\n /// <summary>Bias added before applying the activation function</summary>\r\n bias: number;\r\n /// <summary>Optional activation function (default = ReLU)</summary>\r\n activationFn?: IActivationFunction;\r\n}\r\n\r\n/// <summary>\r\n/// Type guard to check whether an object is an IMlpNeuron\r\n/// </summary>\r\nexport function isMlpNeuron(obj: unknown): obj is IMlpNeuron {\r\n return typeof obj === \"object\" && obj !== null && \"bias\" in obj && \"activationFn\" in obj;\r\n}\r\n\r\n/// <summary>\r\n/// Represents a synapse (connection) in an MLP network.\r\n/// </summary>\r\nexport interface IMlpSynapse extends ISynapse {}\r\n\r\n/// <summary>\r\n/// A complete MLP graph composed of neurons and synapses.\r\n/// </summary>\r\nexport interface IMlpGraph extends IGraph<IMlpNeuron, IMlpSynapse> {}\r\n"]}
@@ -0,0 +1,10 @@
1
+ import { ICartesian } from "../../../geometry";
2
+ import { IOlink } from "../../../graph";
3
+ import { Nullable } from "../../../types";
4
+ import { Neuron } from "../../nn.neuron";
5
+ import { IActivationFunction, IMlpNeuron } from "./mlp.interfaces";
6
/**
 * Concrete MLP neuron: a Neuron with a cloneable bias and an optional
 * per-neuron activation function.
 */
export declare class MlpNeuron extends Neuron implements IMlpNeuron {
    /** Bias added to the input sum before the activation function is applied. */
    bias: number;
    /** Optional activation function (runtime falls back to its default when unset). */
    activationFn?: IActivationFunction | undefined;
    constructor(bias?: number, activation?: IActivationFunction, onsc?: Nullable<IOlink[]>, opsc?: Nullable<IOlink[]>, position?: ICartesian);
}
@@ -0,0 +1,20 @@
1
+ import { __decorate, __metadata } from "tslib";
2
+ import { cloneable } from "../../../graph";
3
+ import { Neuron } from "../../nn.neuron";
4
/**
 * Concrete MLP neuron with a configurable bias and an optional per-neuron
 * activation function. Compiled from TypeScript; the decorator plumbing
 * below is tslib-generated output.
 */
export class MlpNeuron extends Neuron {
    constructor(bias = 0, activation, onsc = null, opsc = null, position) {
        super(onsc, opsc, position);
        // Compiled field initializer; immediately overwritten by the ctor argument.
        this.bias = 0;
        this.bias = bias;
        this.activationFn = activation;
    }
}
// @cloneable markers on bias/activationFn — presumably registers these fields
// with the graph-cloning machinery in ../../../graph (TODO confirm).
__decorate([
    cloneable,
    __metadata("design:type", Number)
], MlpNeuron.prototype, "bias", void 0);
__decorate([
    cloneable,
    __metadata("design:type", Object)
], MlpNeuron.prototype, "activationFn", void 0);
20
+ //# sourceMappingURL=mlp.neuron.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mlp.neuron.js","sourceRoot":"","sources":["../../../../src/neuralnetwork/ann/mlp/mlp.neuron.ts"],"names":[],"mappings":";AACA,OAAO,EAAE,SAAS,EAAU,MAAM,gBAAgB,CAAC;AAEnD,OAAO,EAAE,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAGzC,MAAM,OAAO,SAAU,SAAQ,MAAM;IAIjC,YAAmB,OAAe,CAAC,EAAE,UAAgC,EAAE,OAA2B,IAAI,EAAE,OAA2B,IAAI,EAAE,QAAqB;QAC1J,KAAK,CAAC,IAAI,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;QAJd,SAAI,GAAW,CAAC,CAAC;QAK/B,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,YAAY,GAAG,UAAU,CAAC;IACnC,CAAC;CACJ;AARqB;IAAjB,SAAS;;uCAAyB;AACxB;IAAV,SAAS;;+CAAgD","sourcesContent":["import { ICartesian } from \"../../../geometry\";\r\nimport { cloneable, IOlink } from \"../../../graph\";\r\nimport { Nullable } from \"../../../types\";\r\nimport { Neuron } from \"../../nn.neuron\";\r\nimport { IActivationFunction, IMlpNeuron } from \"./mlp.interfaces\";\r\n\r\nexport class MlpNeuron extends Neuron implements IMlpNeuron {\r\n @cloneable public bias: number = 0;\r\n @cloneable activationFn?: IActivationFunction | undefined;\r\n\r\n public constructor(bias: number = 0, activation?: IActivationFunction, onsc: Nullable<IOlink[]> = null, opsc: Nullable<IOlink[]> = null, position?: ICartesian) {\r\n super(onsc, opsc, position);\r\n this.bias = bias;\r\n this.activationFn = activation;\r\n }\r\n}\r\n"]}
@@ -0,0 +1,5 @@
1
+ import { IMlpNeuron, IMlpSynapse } from "./mlp.interfaces";
2
/** Helpers that (re)initialize the transient `bag` context on neurons and synapses. */
export declare class MLPRuntimeUtils {
    /** Creates or resets the forward-pass (inference) context of a neuron. */
    static resetInferenceContext(neuron: IMlpNeuron): void;
    /** Creates or resets the backpropagation context of a neuron or synapse. */
    static resetBackpropContext(item: IMlpNeuron | IMlpSynapse): void;
}
@@ -0,0 +1,48 @@
1
+ import { isMlpNeuron } from "./mlp.interfaces";
2
/**
 * Helpers that (re)initialize the transient `bag` context stored on neurons
 * and synapses between runtime passes.
 */
export class MLPRuntimeUtils {
    /**
     * Creates or resets the forward-pass context of a neuron.
     * On first use the incoming-synapse count is captured as `totalInputs`;
     * subsequent resets reuse that captured count rather than re-querying.
     */
    static resetInferenceContext(neuron) {
        const existing = neuron.bag;
        if (existing) {
            existing.sum = 0;
            existing.activation = 0;
            existing.remainingInputs = existing.totalInputs;
            return;
        }
        const incoming = neuron.opsc()?.length ?? 0;
        neuron.bag = {
            sum: 0,
            activation: 0,
            remainingInputs: incoming,
            totalInputs: incoming,
        };
    }
    /**
     * Creates or resets the backpropagation context of a graph item.
     * Neurons get { error, gradient }; synapses get the full optimizer
     * state (gradient, velocity, Adam moments, lookahead weight, delta).
     */
    static resetBackpropContext(item) {
        if (isMlpNeuron(item)) {
            const neuronCtx = item.bag;
            if (neuronCtx) {
                neuronCtx.error = 0;
                neuronCtx.gradient = 0;
            }
            else {
                item.bag = { error: 0, gradient: 0 };
            }
            return;
        }
        const synapseCtx = item.bag;
        if (!synapseCtx) {
            item.bag = {
                gradient: 0,
                velocity: 0,
                m: 0,
                v: 0,
                prelookedWeight: 0,
                weightDelta: 0,
            };
            return;
        }
        synapseCtx.gradient = 0;
        synapseCtx.velocity = 0;
        synapseCtx.m = 0;
        synapseCtx.v = 0;
        synapseCtx.prelookedWeight = 0;
        synapseCtx.weightDelta = 0;
    }
}
48
+ //# sourceMappingURL=mlp.runtime.utils.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mlp.runtime.utils.js","sourceRoot":"","sources":["../../../../src/neuralnetwork/ann/mlp/mlp.runtime.utils.ts"],"names":[],"mappings":"AAAA,OAAO,EAAoD,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAGjG,MAAM,OAAO,eAAe;IACjB,MAAM,CAAC,qBAAqB,CAAC,MAAkB;QAClD,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC;YACd,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,EAAe,EAAE,MAAM,IAAI,CAAC,CAAC;YAC1D,MAAM,CAAC,GAAG,GAAG,EAAE,GAAG,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,eAAe,EAAE,SAAS,EAAE,WAAW,EAAE,SAAS,EAAE,CAAC;QAC/F,CAAC;aAAM,CAAC;YACJ,MAAM,GAAG,GAAG,MAAM,CAAC,GAA8B,CAAC;YAClD,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC;YACZ,GAAG,CAAC,UAAU,GAAG,CAAC,CAAC;YACnB,GAAG,CAAC,eAAe,GAAG,GAAG,CAAC,WAAW,CAAC;QAC1C,CAAC;IACL,CAAC;IAEM,MAAM,CAAC,oBAAoB,CAAC,IAA8B;QAC7D,IAAI,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC;YACpB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;gBACZ,IAAI,CAAC,GAAG,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC;YACzC,CAAC;iBAAM,CAAC;gBACJ,MAAM,GAAG,GAAG,IAAI,CAAC,GAA6B,CAAC;gBAC/C,GAAG,CAAC,KAAK,GAAG,CAAC,CAAC;gBACd,GAAG,CAAC,QAAQ,GAAG,CAAC,CAAC;YACrB,CAAC;YACD,OAAO;QACX,CAAC;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;YACZ,IAAI,CAAC,GAAG,GAAG;gBACP,QAAQ,EAAE,CAAC;gBACX,QAAQ,EAAE,CAAC;gBACX,CAAC,EAAE,CAAC;gBACJ,CAAC,EAAE,CAAC;gBACJ,eAAe,EAAE,CAAC;gBAClB,WAAW,EAAE,CAAC;aACjB,CAAC;QACN,CAAC;aAAM,CAAC;YACJ,MAAM,GAAG,GAAG,IAAI,CAAC,GAA8B,CAAC;YAChD,GAAG,CAAC,QAAQ,GAAG,CAAC,CAAC;YACjB,GAAG,CAAC,QAAQ,GAAG,CAAC,CAAC;YACjB,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;YACV,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;YACV,GAAG,CAAC,eAAe,GAAG,CAAC,CAAC;YACxB,GAAG,CAAC,WAAW,GAAG,CAAC,CAAC;QACxB,CAAC;IACL,CAAC;CACJ","sourcesContent":["import { IMlpNeuron, IMlpSynapse, IInferenceNeuronContext, isMlpNeuron } from \"./mlp.interfaces\";\r\nimport { IBackpropNeuronContext, IBackpropSynapseContext } from \"./training\";\r\n\r\nexport class MLPRuntimeUtils {\r\n public static resetInferenceContext(neuron: IMlpNeuron): void {\r\n if (!neuron.bag) {\r\n const numInputs = neuron.opsc<IMlpSynapse>()?.length ?? 
0;\r\n neuron.bag = { sum: 0, activation: 0, remainingInputs: numInputs, totalInputs: numInputs };\r\n } else {\r\n const bag = neuron.bag as IInferenceNeuronContext;\r\n bag.sum = 0;\r\n bag.activation = 0;\r\n bag.remainingInputs = bag.totalInputs;\r\n }\r\n }\r\n\r\n public static resetBackpropContext(item: IMlpNeuron | IMlpSynapse): void {\r\n if (isMlpNeuron(item)) {\r\n if (!item.bag) {\r\n item.bag = { error: 0, gradient: 0 };\r\n } else {\r\n const bag = item.bag as IBackpropNeuronContext;\r\n bag.error = 0;\r\n bag.gradient = 0;\r\n }\r\n return;\r\n }\r\n if (!item.bag) {\r\n item.bag = {\r\n gradient: 0,\r\n velocity: 0,\r\n m: 0,\r\n v: 0,\r\n prelookedWeight: 0,\r\n weightDelta: 0,\r\n };\r\n } else {\r\n const bag = item.bag as IBackpropSynapseContext;\r\n bag.gradient = 0;\r\n bag.velocity = 0;\r\n bag.m = 0;\r\n bag.v = 0;\r\n bag.prelookedWeight = 0;\r\n bag.weightDelta = 0;\r\n }\r\n }\r\n}\r\n"]}
@@ -0,0 +1,6 @@
1
+ import { INode } from "../../../graph";
2
+ import { Synapse } from "../../nn.synapse";
3
+ import { IMlpSynapse } from "./mlp.interfaces";
4
/** Concrete MLP synapse: a weighted connection between two nodes. */
export declare class MlpSynapse extends Synapse implements IMlpSynapse {
    constructor(oini: INode, ofin: INode);
}
@@ -0,0 +1,7 @@
1
+ import { Synapse } from "../../nn.synapse";
2
/**
 * Concrete MLP synapse. Simply forwards its endpoints to the base Synapse:
 * `ofin` is the downstream node the inference pass delivers to; `oini` is
 * presumably the upstream node — confirm against the base class.
 */
export class MlpSynapse extends Synapse {
    constructor(oini, ofin) {
        super(oini, ofin);
    }
}
7
+ //# sourceMappingURL=mlp.synapse.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mlp.synapse.js","sourceRoot":"","sources":["../../../../src/neuralnetwork/ann/mlp/mlp.synapse.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAG3C,MAAM,OAAO,UAAW,SAAQ,OAAO;IACnC,YAAmB,IAAW,EAAE,IAAW;QACvC,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;IACtB,CAAC;CACJ","sourcesContent":["import { INode } from \"../../../graph\";\r\nimport { Synapse } from \"../../nn.synapse\";\r\nimport { IMlpSynapse } from \"./mlp.interfaces\";\r\n\r\nexport class MlpSynapse extends Synapse implements IMlpSynapse {\r\n public constructor(oini: INode, ofin: INode) {\r\n super(oini, ofin);\r\n }\r\n}\r\n"]}
@@ -0,0 +1,4 @@
1
+ export * from "./mlp.optimizers";
2
+ export * from "./mlp.loss";
3
+ export * from "./mlp.training.interfaces";
4
+ export * from "./mlp.training";
@@ -0,0 +1,5 @@
1
+ export * from "./mlp.optimizers";
2
+ export * from "./mlp.loss";
3
+ export * from "./mlp.training.interfaces";
4
+ export * from "./mlp.training";
5
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../src/neuralnetwork/ann/mlp/training/index.ts"],"names":[],"mappings":"AAAA,cAAc,kBAAkB,CAAC;AACjC,cAAc,YAAY,CAAC;AAC3B,cAAc,2BAA2B,CAAC;AAC1C,cAAc,gBAAgB,CAAC","sourcesContent":["export * from \"./mlp.optimizers\";\r\nexport * from \"./mlp.loss\";\r\nexport * from \"./mlp.training.interfaces\";\r\nexport * from \"./mlp.training\";\r\n"]}
@@ -0,0 +1,10 @@
1
/**
 * Standard loss functions. Each entry exposes `loss(o, y)` — the per-sample
 * loss of prediction `o` against target `y` — and `dLoss(o, y)`, its
 * derivative with respect to the prediction `o`.
 */
export declare const LossFunctions: {
    MSE: {
        loss: (o: number, y: number) => number;
        dLoss: (o: number, y: number) => number;
    };
    CrossEntropy: {
        loss: (o: number, y: number) => number;
        dLoss: (o: number, y: number) => number;
    };
};
@@ -0,0 +1,11 @@
1
/**
 * Standard loss functions keyed by name. `loss(o, y)` returns the per-sample
 * loss of prediction `o` against target `y`; `dLoss(o, y)` is its derivative
 * with respect to `o`. CrossEntropy adds a 1e-8 epsilon to the log and
 * division arguments to avoid log(0) and division by zero at o = 0 or o = 1.
 */
export const LossFunctions = {
    MSE: {
        loss(o, y) {
            return 0.5 * (o - y) ** 2;
        },
        dLoss(o, y) {
            return o - y;
        },
    },
    CrossEntropy: {
        loss(o, y) {
            return -y * Math.log(o + 1e-8) - (1 - y) * Math.log(1 - o + 1e-8);
        },
        dLoss(o, y) {
            return (o - y) / ((o + 1e-8) * (1 - o + 1e-8));
        },
    },
};
11
+ //# sourceMappingURL=mlp.loss.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mlp.loss.js","sourceRoot":"","sources":["../../../../../src/neuralnetwork/ann/mlp/training/mlp.loss.ts"],"names":[],"mappings":"AAEA,MAAM,CAAC,MAAM,aAAa,GAAG;IACzB,GAAG,EAAE;QACD,IAAI,EAAE,CAAC,CAAS,EAAE,CAAS,EAAE,EAAE,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QACxD,KAAK,EAAE,CAAC,CAAS,EAAE,CAAS,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC;KACzC;IACD,YAAY,EAAE;QACV,IAAI,EAAE,CAAC,CAAS,EAAE,CAAS,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC;QAC1F,KAAK,EAAE,CAAC,CAAS,EAAE,CAAS,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;KAC3E;CACoC,CAAC","sourcesContent":["import { ILossFunction } from \"./mlp.training.interfaces\";\r\n\r\nexport const LossFunctions = {\r\n MSE: {\r\n loss: (o: number, y: number) => 0.5 * Math.pow(o - y, 2),\r\n dLoss: (o: number, y: number) => o - y,\r\n },\r\n CrossEntropy: {\r\n loss: (o: number, y: number) => -y * Math.log(o + 1e-8) - (1 - y) * Math.log(1 - o + 1e-8),\r\n dLoss: (o: number, y: number) => (o - y) / ((o + 1e-8) * (1 - o + 1e-8)),\r\n },\r\n} satisfies Record<string, ILossFunction>;\r\n"]}
@@ -0,0 +1,7 @@
1
+ import { IOptimizer } from "./mlp.training.interfaces";
2
/** Factory methods producing IOptimizer instances for synapse weight updates. */
export declare class Optimizers {
    /** Plain stochastic gradient descent. */
    static SGD: () => IOptimizer;
    /** SGD with classical momentum. */
    static MomentumSGD: (momentum: number) => IOptimizer;
    /** Nesterov accelerated gradient (default momentum 0.9). */
    static NAG: (momentum?: number) => IOptimizer;
    /** Adam with bias correction (defaults: beta1 = 0.9, beta2 = 0.999, epsilon = 1e-8). */
    static Adam: (beta1?: number, beta2?: number, epsilon?: number) => IOptimizer;
}
@@ -0,0 +1,59 @@
1
/**
 * Factory methods producing optimizer instances. Each optimizer exposes
 * `apply(synapse, lr, gradient, ctx)`, which updates `synapse.weight` in
 * place and keeps its working state on `synapse.bag`.
 */
export class Optimizers {
}
/// <summary>
/// Stochastic Gradient Descent (SGD) optimizer
/// </summary>
Optimizers.SGD = () => ({
    apply(synapse, lr, gradient, ctx) {
        if (synapse.bag == null) {
            synapse.bag = { gradient };
        }
        const state = synapse.bag;
        state.gradient = gradient;
        state.weightDelta = -lr * gradient;
        synapse.weight += state.weightDelta;
    },
});
/// <summary>
/// SGD with Momentum optimizer
/// </summary>
Optimizers.MomentumSGD = (momentum) => ({
    apply(synapse, lr, gradient, ctx) {
        if (synapse.bag == null) {
            synapse.bag = { gradient };
        }
        const state = synapse.bag;
        state.gradient = gradient;
        if (state.velocity == null) {
            state.velocity = 0;
        }
        state.velocity = momentum * state.velocity - lr * gradient;
        state.weightDelta = state.velocity;
        synapse.weight += state.weightDelta;
    },
});
/// <summary>
/// Nesterov Accelerated Gradient (NAG) optimizer
/// </summary>
Optimizers.NAG = (momentum = 0.9) => ({
    apply(synapse, lr, gradient, ctx) {
        if (synapse.bag == null) {
            synapse.bag = { gradient };
        }
        const state = synapse.bag;
        state.gradient = gradient;
        if (state.velocity == null) {
            state.velocity = 0;
        }
        state.velocity = momentum * state.velocity - lr * gradient;
        state.weightDelta = state.velocity;
        synapse.weight += state.weightDelta;
        // NOTE(review): the lookahead weight is cleared here but no pre-look
        // step is visible in this method — confirm the NAG lookahead is
        // performed by the training loop before the gradient is computed.
        state.prelookedWeight = undefined;
    },
});
/// <summary>
/// Adam optimizer with bias correction
/// </summary>
Optimizers.Adam = (beta1 = 0.9, beta2 = 0.999, epsilon = 1e-8) => ({
    apply(synapse, lr, gradient, ctx) {
        if (synapse.bag == null) {
            synapse.bag = { gradient };
        }
        const state = synapse.bag;
        // 1-based timestep so the bias-correction denominators are non-zero.
        const step = ctx.iteration + 1;
        state.gradient = gradient;
        if (state.m == null) {
            state.m = 0;
        }
        if (state.v == null) {
            state.v = 0;
        }
        state.m = beta1 * state.m + (1 - beta1) * gradient;
        state.v = beta2 * state.v + (1 - beta2) * gradient * gradient;
        // Bias-corrected first and second moment estimates.
        const mHat = state.m / (1 - beta1 ** step);
        const vHat = state.v / (1 - beta2 ** step);
        state.weightDelta = (-lr * mHat) / (Math.sqrt(vHat) + epsilon);
        synapse.weight += state.weightDelta;
    },
});
59
+ //# sourceMappingURL=mlp.optimizers.js.map