@reicek/neataptic-ts 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (272)
  1. package/.github/ISSUE_TEMPLATE/bug_report.md +33 -0
  2. package/.github/ISSUE_TEMPLATE/feature_request.md +27 -0
  3. package/.github/PULL_REQUEST_TEMPLATE.md +28 -0
  4. package/.github/workflows/ci.yml +41 -0
  5. package/.github/workflows/deploy-pages.yml +29 -0
  6. package/.github/workflows/manual_release_pipeline.yml +62 -0
  7. package/.github/workflows/publish.yml +85 -0
  8. package/.github/workflows/release_dispatch.yml +38 -0
  9. package/.travis.yml +5 -0
  10. package/CONTRIBUTING.md +92 -0
  11. package/LICENSE +24 -0
  12. package/ONNX_EXPORT.md +87 -0
  13. package/README.md +1173 -0
  14. package/RELEASE.md +54 -0
  15. package/dist-docs/package.json +1 -0
  16. package/dist-docs/scripts/generate-docs.d.ts +2 -0
  17. package/dist-docs/scripts/generate-docs.d.ts.map +1 -0
  18. package/dist-docs/scripts/generate-docs.js +536 -0
  19. package/dist-docs/scripts/generate-docs.js.map +1 -0
  20. package/dist-docs/scripts/render-docs-html.d.ts +2 -0
  21. package/dist-docs/scripts/render-docs-html.d.ts.map +1 -0
  22. package/dist-docs/scripts/render-docs-html.js +148 -0
  23. package/dist-docs/scripts/render-docs-html.js.map +1 -0
  24. package/docs/FOLDERS.md +14 -0
  25. package/docs/README.md +1173 -0
  26. package/docs/architecture/README.md +1391 -0
  27. package/docs/architecture/index.html +938 -0
  28. package/docs/architecture/network/README.md +1210 -0
  29. package/docs/architecture/network/index.html +908 -0
  30. package/docs/assets/ascii-maze.bundle.js +16542 -0
  31. package/docs/assets/ascii-maze.bundle.js.map +7 -0
  32. package/docs/index.html +1419 -0
  33. package/docs/methods/README.md +670 -0
  34. package/docs/methods/index.html +477 -0
  35. package/docs/multithreading/README.md +274 -0
  36. package/docs/multithreading/index.html +215 -0
  37. package/docs/multithreading/workers/README.md +23 -0
  38. package/docs/multithreading/workers/browser/README.md +39 -0
  39. package/docs/multithreading/workers/browser/index.html +70 -0
  40. package/docs/multithreading/workers/index.html +57 -0
  41. package/docs/multithreading/workers/node/README.md +33 -0
  42. package/docs/multithreading/workers/node/index.html +66 -0
  43. package/docs/neat/README.md +1284 -0
  44. package/docs/neat/index.html +906 -0
  45. package/docs/src/README.md +2659 -0
  46. package/docs/src/index.html +1579 -0
  47. package/jest.config.ts +32 -0
  48. package/package.json +99 -0
  49. package/plans/HyperMorphoNEAT.md +293 -0
  50. package/plans/ONNX_EXPORT_PLAN.md +46 -0
  51. package/scripts/generate-docs.ts +486 -0
  52. package/scripts/render-docs-html.ts +138 -0
  53. package/scripts/types.d.ts +2 -0
  54. package/src/README.md +2659 -0
  55. package/src/architecture/README.md +1391 -0
  56. package/src/architecture/activationArrayPool.ts +135 -0
  57. package/src/architecture/architect.ts +635 -0
  58. package/src/architecture/connection.ts +148 -0
  59. package/src/architecture/group.ts +406 -0
  60. package/src/architecture/layer.ts +804 -0
  61. package/src/architecture/network/README.md +1210 -0
  62. package/src/architecture/network/network.activate.ts +223 -0
  63. package/src/architecture/network/network.connect.ts +157 -0
  64. package/src/architecture/network/network.deterministic.ts +167 -0
  65. package/src/architecture/network/network.evolve.ts +426 -0
  66. package/src/architecture/network/network.gating.ts +186 -0
  67. package/src/architecture/network/network.genetic.ts +247 -0
  68. package/src/architecture/network/network.mutate.ts +624 -0
  69. package/src/architecture/network/network.onnx.ts +463 -0
  70. package/src/architecture/network/network.prune.ts +216 -0
  71. package/src/architecture/network/network.remove.ts +96 -0
  72. package/src/architecture/network/network.serialize.ts +309 -0
  73. package/src/architecture/network/network.slab.ts +262 -0
  74. package/src/architecture/network/network.standalone.ts +246 -0
  75. package/src/architecture/network/network.stats.ts +59 -0
  76. package/src/architecture/network/network.topology.ts +86 -0
  77. package/src/architecture/network/network.training.ts +1278 -0
  78. package/src/architecture/network.ts +1302 -0
  79. package/src/architecture/node.ts +1288 -0
  80. package/src/architecture/onnx.ts +3 -0
  81. package/src/config.ts +83 -0
  82. package/src/methods/README.md +670 -0
  83. package/src/methods/activation.ts +372 -0
  84. package/src/methods/connection.ts +31 -0
  85. package/src/methods/cost.ts +347 -0
  86. package/src/methods/crossover.ts +63 -0
  87. package/src/methods/gating.ts +43 -0
  88. package/src/methods/methods.ts +8 -0
  89. package/src/methods/mutation.ts +300 -0
  90. package/src/methods/rate.ts +257 -0
  91. package/src/methods/selection.ts +65 -0
  92. package/src/multithreading/README.md +274 -0
  93. package/src/multithreading/multi.ts +339 -0
  94. package/src/multithreading/workers/README.md +23 -0
  95. package/src/multithreading/workers/browser/README.md +39 -0
  96. package/src/multithreading/workers/browser/testworker.ts +99 -0
  97. package/src/multithreading/workers/node/README.md +33 -0
  98. package/src/multithreading/workers/node/testworker.ts +72 -0
  99. package/src/multithreading/workers/node/worker.ts +70 -0
  100. package/src/multithreading/workers/workers.ts +22 -0
  101. package/src/neat/README.md +1284 -0
  102. package/src/neat/neat.adaptive.ts +544 -0
  103. package/src/neat/neat.compat.ts +164 -0
  104. package/src/neat/neat.constants.ts +20 -0
  105. package/src/neat/neat.diversity.ts +217 -0
  106. package/src/neat/neat.evaluate.ts +328 -0
  107. package/src/neat/neat.evolve.ts +1026 -0
  108. package/src/neat/neat.export.ts +249 -0
  109. package/src/neat/neat.helpers.ts +235 -0
  110. package/src/neat/neat.lineage.ts +220 -0
  111. package/src/neat/neat.multiobjective.ts +260 -0
  112. package/src/neat/neat.mutation.ts +718 -0
  113. package/src/neat/neat.objectives.ts +157 -0
  114. package/src/neat/neat.pruning.ts +190 -0
  115. package/src/neat/neat.selection.ts +269 -0
  116. package/src/neat/neat.speciation.ts +460 -0
  117. package/src/neat/neat.species.ts +151 -0
  118. package/src/neat/neat.telemetry.exports.ts +469 -0
  119. package/src/neat/neat.telemetry.ts +933 -0
  120. package/src/neat/neat.types.ts +275 -0
  121. package/src/neat.ts +1042 -0
  122. package/src/neataptic.ts +10 -0
  123. package/test/architecture/activationArrayPool.capacity.test.ts +19 -0
  124. package/test/architecture/activationArrayPool.test.ts +46 -0
  125. package/test/architecture/connection.test.ts +290 -0
  126. package/test/architecture/group.test.ts +950 -0
  127. package/test/architecture/layer.test.ts +1535 -0
  128. package/test/architecture/network.pruning.test.ts +65 -0
  129. package/test/architecture/node.test.ts +1602 -0
  130. package/test/examples/asciiMaze/asciiMaze.e2e.test.ts +499 -0
  131. package/test/examples/asciiMaze/asciiMaze.ts +41 -0
  132. package/test/examples/asciiMaze/browser-entry.ts +164 -0
  133. package/test/examples/asciiMaze/browserLogger.ts +221 -0
  134. package/test/examples/asciiMaze/browserTerminalUtility.ts +48 -0
  135. package/test/examples/asciiMaze/colors.ts +119 -0
  136. package/test/examples/asciiMaze/dashboardManager.ts +968 -0
  137. package/test/examples/asciiMaze/evolutionEngine.ts +1248 -0
  138. package/test/examples/asciiMaze/fitness.ts +136 -0
  139. package/test/examples/asciiMaze/index.html +128 -0
  140. package/test/examples/asciiMaze/index.ts +26 -0
  141. package/test/examples/asciiMaze/interfaces.ts +235 -0
  142. package/test/examples/asciiMaze/mazeMovement.ts +996 -0
  143. package/test/examples/asciiMaze/mazeUtils.ts +278 -0
  144. package/test/examples/asciiMaze/mazeVision.ts +402 -0
  145. package/test/examples/asciiMaze/mazeVisualization.ts +585 -0
  146. package/test/examples/asciiMaze/mazes.ts +245 -0
  147. package/test/examples/asciiMaze/networkRefinement.ts +76 -0
  148. package/test/examples/asciiMaze/networkVisualization.ts +901 -0
  149. package/test/examples/asciiMaze/terminalUtility.ts +73 -0
  150. package/test/methods/activation.test.ts +1142 -0
  151. package/test/methods/connection.test.ts +146 -0
  152. package/test/methods/cost.test.ts +1123 -0
  153. package/test/methods/crossover.test.ts +202 -0
  154. package/test/methods/gating.test.ts +144 -0
  155. package/test/methods/mutation.test.ts +451 -0
  156. package/test/methods/optimizers.advanced.test.ts +80 -0
  157. package/test/methods/optimizers.behavior.test.ts +105 -0
  158. package/test/methods/optimizers.formula.test.ts +89 -0
  159. package/test/methods/rate.cosineWarmRestarts.test.ts +44 -0
  160. package/test/methods/rate.linearWarmupDecay.test.ts +41 -0
  161. package/test/methods/rate.reduceOnPlateau.test.ts +45 -0
  162. package/test/methods/rate.test.ts +684 -0
  163. package/test/methods/selection.test.ts +245 -0
  164. package/test/multithreading/activations.functions.test.ts +54 -0
  165. package/test/multithreading/multi.test.ts +290 -0
  166. package/test/multithreading/worker.node.process.test.ts +39 -0
  167. package/test/multithreading/workers.coverage.test.ts +36 -0
  168. package/test/multithreading/workers.dynamic.import.test.ts +8 -0
  169. package/test/neat/neat.adaptive.complexityBudget.test.ts +34 -0
  170. package/test/neat/neat.adaptive.criterion.complexity.test.ts +50 -0
  171. package/test/neat/neat.adaptive.mutation.strategy.test.ts +37 -0
  172. package/test/neat/neat.adaptive.operator.decay.test.ts +31 -0
  173. package/test/neat/neat.adaptive.phasedComplexity.test.ts +25 -0
  174. package/test/neat/neat.adaptive.pruning.test.ts +25 -0
  175. package/test/neat/neat.adaptive.targetSpecies.test.ts +43 -0
  176. package/test/neat/neat.additional.coverage.test.ts +126 -0
  177. package/test/neat/neat.advanced.enhancements.test.ts +85 -0
  178. package/test/neat/neat.advanced.test.ts +589 -0
  179. package/test/neat/neat.diversity.autocompat.test.ts +47 -0
  180. package/test/neat/neat.diversity.metrics.test.ts +21 -0
  181. package/test/neat/neat.diversity.stats.test.ts +44 -0
  182. package/test/neat/neat.enhancements.test.ts +79 -0
  183. package/test/neat/neat.entropy.ancestorAdaptive.test.ts +133 -0
  184. package/test/neat/neat.entropy.compat.csv.test.ts +108 -0
  185. package/test/neat/neat.evolution.pruning.test.ts +39 -0
  186. package/test/neat/neat.fastmode.autotune.test.ts +42 -0
  187. package/test/neat/neat.innovation.test.ts +134 -0
  188. package/test/neat/neat.lineage.antibreeding.test.ts +35 -0
  189. package/test/neat/neat.lineage.entropy.test.ts +56 -0
  190. package/test/neat/neat.lineage.inbreeding.test.ts +49 -0
  191. package/test/neat/neat.lineage.pressure.test.ts +29 -0
  192. package/test/neat/neat.multiobjective.adaptive.test.ts +57 -0
  193. package/test/neat/neat.multiobjective.dynamic.schedule.test.ts +46 -0
  194. package/test/neat/neat.multiobjective.dynamic.test.ts +31 -0
  195. package/test/neat/neat.multiobjective.fastsort.delegation.test.ts +51 -0
  196. package/test/neat/neat.multiobjective.prune.test.ts +39 -0
  197. package/test/neat/neat.multiobjective.test.ts +21 -0
  198. package/test/neat/neat.mutation.undefined.pool.test.ts +24 -0
  199. package/test/neat/neat.objective.events.test.ts +26 -0
  200. package/test/neat/neat.objective.importance.test.ts +21 -0
  201. package/test/neat/neat.objective.lifetimes.test.ts +33 -0
  202. package/test/neat/neat.offspring.allocation.test.ts +22 -0
  203. package/test/neat/neat.operator.bandit.test.ts +17 -0
  204. package/test/neat/neat.operator.phases.test.ts +38 -0
  205. package/test/neat/neat.pruneInactive.behavior.test.ts +54 -0
  206. package/test/neat/neat.reenable.adaptation.test.ts +18 -0
  207. package/test/neat/neat.rng.state.test.ts +22 -0
  208. package/test/neat/neat.spawn.add.test.ts +123 -0
  209. package/test/neat/neat.speciation.test.ts +96 -0
  210. package/test/neat/neat.species.allocation.telemetry.test.ts +26 -0
  211. package/test/neat/neat.species.history.csv.test.ts +24 -0
  212. package/test/neat/neat.telemetry.advanced.test.ts +226 -0
  213. package/test/neat/neat.telemetry.csv.lineage.test.ts +19 -0
  214. package/test/neat/neat.telemetry.parity.test.ts +42 -0
  215. package/test/neat/neat.telemetry.stream.test.ts +19 -0
  216. package/test/neat/neat.telemetry.test.ts +16 -0
  217. package/test/neat/neat.test.ts +422 -0
  218. package/test/neat/neat.utilities.test.ts +44 -0
  219. package/test/network/__suppress_console.ts +9 -0
  220. package/test/network/acyclic.topoorder.test.ts +17 -0
  221. package/test/network/checkpoint.metricshook.test.ts +36 -0
  222. package/test/network/error.handling.test.ts +581 -0
  223. package/test/network/evolution.test.ts +285 -0
  224. package/test/network/genetic.test.ts +208 -0
  225. package/test/network/learning.capability.test.ts +244 -0
  226. package/test/network/mutation.effects.test.ts +492 -0
  227. package/test/network/network.activate.test.ts +115 -0
  228. package/test/network/network.activateBatch.test.ts +30 -0
  229. package/test/network/network.deterministic.test.ts +64 -0
  230. package/test/network/network.evolve.branches.test.ts +75 -0
  231. package/test/network/network.evolve.multithread.branches.test.ts +83 -0
  232. package/test/network/network.evolve.test.ts +100 -0
  233. package/test/network/network.gating.removal.test.ts +93 -0
  234. package/test/network/network.mutate.additional.test.ts +145 -0
  235. package/test/network/network.mutate.edgecases.test.ts +101 -0
  236. package/test/network/network.mutate.test.ts +101 -0
  237. package/test/network/network.prune.earlyexit.test.ts +38 -0
  238. package/test/network/network.remove.errors.test.ts +45 -0
  239. package/test/network/network.slab.fallbacks.test.ts +22 -0
  240. package/test/network/network.stats.test.ts +45 -0
  241. package/test/network/network.training.advanced.test.ts +149 -0
  242. package/test/network/network.training.basic.test.ts +228 -0
  243. package/test/network/network.training.helpers.test.ts +183 -0
  244. package/test/network/onnx.export.test.ts +310 -0
  245. package/test/network/onnx.import.test.ts +129 -0
  246. package/test/network/pruning.topology.test.ts +282 -0
  247. package/test/network/regularization.determinism.test.ts +83 -0
  248. package/test/network/regularization.dropconnect.test.ts +17 -0
  249. package/test/network/regularization.dropconnect.validation.test.ts +18 -0
  250. package/test/network/regularization.stochasticdepth.test.ts +27 -0
  251. package/test/network/regularization.test.ts +843 -0
  252. package/test/network/regularization.weightnoise.test.ts +30 -0
  253. package/test/network/setupTests.ts +2 -0
  254. package/test/network/standalone.test.ts +332 -0
  255. package/test/network/structure.serialization.test.ts +660 -0
  256. package/test/training/training.determinism.mixed-precision.test.ts +134 -0
  257. package/test/training/training.earlystopping.test.ts +91 -0
  258. package/test/training/training.edge-cases.test.ts +91 -0
  259. package/test/training/training.extensions.test.ts +47 -0
  260. package/test/training/training.gradient.features.test.ts +110 -0
  261. package/test/training/training.gradient.refinements.test.ts +170 -0
  262. package/test/training/training.gradient.separate-bias.test.ts +41 -0
  263. package/test/training/training.optimizer.test.ts +48 -0
  264. package/test/training/training.plateau.smoothing.test.ts +58 -0
  265. package/test/training/training.smoothing.types.test.ts +174 -0
  266. package/test/training/training.train.options.coverage.test.ts +52 -0
  267. package/test/utils/console-helper.ts +76 -0
  268. package/test/utils/jest-setup.ts +60 -0
  269. package/test/utils/test-helpers.ts +175 -0
  270. package/tsconfig.docs.json +12 -0
  271. package/tsconfig.json +21 -0
  272. package/webpack.config.js +49 -0
@@ -0,0 +1,347 @@
1
+ /**
2
+ * Provides a collection of standard cost functions (also known as loss functions)
3
+ * used for evaluating the performance of neural networks during training.
4
+ *
5
+ * Cost functions quantify the difference between the network's predictions
6
+ * and the actual target values. The goal of training is typically to minimize
7
+ * the value of the cost function. The choice of cost function is crucial and
8
+ * depends on the specific task (e.g., regression, classification) and the
9
+ * desired behavior of the model.
10
+ *
11
+ * @see {@link https://en.wikipedia.org/wiki/Loss_function}
12
+ */
13
+ import { PROB_EPSILON } from '../neat/neat.constants';
14
+
15
+ export default class Cost {
16
+ /**
17
+ * Calculates the Cross Entropy error, commonly used for classification tasks.
18
+ *
19
+ * This function measures the performance of a classification model whose output is
20
+ * a probability value between 0 and 1. Cross-entropy loss increases as the
21
+ * predicted probability diverges from the actual label.
22
+ *
23
+ * It uses a small epsilon (PROB_EPSILON = 1e-15) to prevent `log(0)` which would result in `NaN`.
24
+ * Output values are clamped to the range `[epsilon, 1 - epsilon]` for numerical stability.
25
+ *
26
+ * @see {@link https://en.wikipedia.org/wiki/Cross_entropy}
27
+ * @param {number[]} targets - An array of target values, typically 0 or 1 for binary classification, or probabilities for soft labels.
28
+ * @param {number[]} outputs - An array of output values from the network, representing probabilities (expected to be between 0 and 1).
29
+ * @returns {number} The mean cross-entropy error over all samples.
30
+ * @throws {Error} If the target and output arrays have different lengths.
31
+ */
32
+ static crossEntropy(targets: number[], outputs: number[]): number {
33
+ let error = 0;
34
+ const epsilon = PROB_EPSILON; // Small constant to avoid log(0)
35
+
36
+ if (targets.length !== outputs.length) {
37
+ throw new Error('Target and output arrays must have the same length.');
38
+ }
39
+
40
+ for (let i = 0; i < outputs.length; i++) {
41
+ const target = targets[i];
42
+ const output = outputs[i];
43
+
44
+ // Clamp output to prevent log(0) or log(<0) issues.
45
+ const clampedOutput = Math.max(epsilon, Math.min(1 - epsilon, output));
46
+
47
+ // Note: Assumes target is 0 or 1 for standard binary cross-entropy.
48
+ // The formula handles soft labels (targets between 0 and 1) correctly.
49
+ if (target === 1) {
50
+ error -= Math.log(clampedOutput); // Cost when target is 1
51
+ } else if (target === 0) {
52
+ error -= Math.log(1 - clampedOutput); // Cost when target is 0
53
+ } else {
54
+ // General case for targets between 0 and 1 (soft labels)
55
+ error -=
56
+ target * Math.log(clampedOutput) +
57
+ (1 - target) * Math.log(1 - clampedOutput);
58
+ }
59
+ }
60
+
61
+ // Return the average error over the batch/dataset.
62
+ return error / outputs.length;
63
+ }
64
+
65
+ /**
66
+ * Softmax Cross Entropy for mutually exclusive multi-class outputs given raw (pre-softmax or arbitrary) scores.
67
+ * Applies a numerically stable softmax to the outputs internally then computes -sum(target * log(prob)).
68
+ * Targets may be soft labels and are expected to sum to 1 (will be re-normalized if not).
69
+ */
70
+ static softmaxCrossEntropy(targets: number[], outputs: number[]): number {
71
+ if (targets.length !== outputs.length) {
72
+ throw new Error('Target and output arrays must have the same length.');
73
+ }
74
+ const n = outputs.length;
75
+ // Normalize targets if they don't sum to 1
76
+ let tSum = 0;
77
+ for (const t of targets) tSum += t;
78
+ const normTargets =
79
+ tSum > 0 ? targets.map((t) => t / tSum) : targets.slice();
80
+ // Stable softmax
81
+ const max = Math.max(...outputs);
82
+ const exps = outputs.map((o) => Math.exp(o - max));
83
+ const sum = exps.reduce((a, b) => a + b, 0) || 1;
84
+ const probs = exps.map((e) => e / sum);
85
+ let loss = 0;
86
+ const eps = PROB_EPSILON;
87
+ for (let i = 0; i < n; i++) {
88
+ const p = Math.min(1 - eps, Math.max(eps, probs[i]));
89
+ const t = normTargets[i];
90
+ loss -= t * Math.log(p);
91
+ }
92
+ return loss; // mean not applied; caller can average externally if batching
93
+ }
94
+
95
+ /**
96
+ * Calculates the Mean Squared Error (MSE), a common loss function for regression tasks.
97
+ *
98
+ * MSE measures the average of the squares of the errors—that is, the average
99
+ * squared difference between the estimated values and the actual value.
100
+ * It is sensitive to outliers due to the squaring of the error terms.
101
+ *
102
+ * @see {@link https://en.wikipedia.org/wiki/Mean_squared_error}
103
+ * @param {number[]} targets - An array of target numerical values.
104
+ * @param {number[]} outputs - An array of output values from the network.
105
+ * @returns {number} The mean squared error.
106
+ * @throws {Error} If the target and output arrays have different lengths (implicitly via forEach).
107
+ */
108
+ static mse(targets: number[], outputs: number[]): number {
109
+ if (targets.length !== outputs.length) {
110
+ throw new Error('Target and output arrays must have the same length.');
111
+ }
112
+ let error = 0;
113
+
114
+ // Assumes targets and outputs have the same length.
115
+ outputs.forEach((output, outputIndex) => {
116
+ // Calculate the squared difference for each sample.
117
+ error += Math.pow(targets[outputIndex] - output, 2);
118
+ });
119
+
120
+ // Return the average squared error.
121
+ return error / outputs.length;
122
+ }
123
+
124
+ /**
125
+ * Calculates the Binary Error rate, often used as a simple accuracy metric for classification.
126
+ *
127
+ * This function calculates the proportion of misclassifications by comparing the
128
+ * rounded network outputs (thresholded at 0.5) against the target labels.
129
+ * It assumes target values are 0 or 1, and outputs are probabilities between 0 and 1.
130
+ * Note: This is equivalent to `1 - accuracy` for binary classification.
131
+ *
132
+ * @param {number[]} targets - An array of target values, expected to be 0 or 1.
133
+ * @param {number[]} outputs - An array of output values from the network, typically probabilities between 0 and 1.
134
+ * @returns {number} The proportion of misclassified samples (error rate, between 0 and 1).
135
+ * @throws {Error} If the target and output arrays have different lengths (implicitly via forEach).
136
+ */
137
+ static binary(targets: number[], outputs: number[]): number {
138
+ if (targets.length !== outputs.length) {
139
+ throw new Error('Target and output arrays must have the same length.');
140
+ }
141
+ let misses = 0;
142
+
143
+ // Assumes targets and outputs have the same length.
144
+ outputs.forEach((output, outputIndex) => {
145
+ // Round output to nearest integer (0 or 1) using a 0.5 threshold.
146
+ // Compare rounded output to the target label.
147
+ misses += Math.round(targets[outputIndex]) !== Math.round(output) ? 1 : 0;
148
+ });
149
+
150
+ // Return the error rate (proportion of misses).
151
+ return misses / outputs.length;
152
+ // Alternative: return `misses` to get the raw count of misclassifications.
153
+ }
154
+
155
+ /**
156
+ * Calculates the Mean Absolute Error (MAE), another common loss function for regression tasks.
157
+ *
158
+ * MAE measures the average of the absolute differences between predictions and actual values.
159
+ * Compared to MSE, it is less sensitive to outliers because errors are not squared.
160
+ *
161
+ * @see {@link https://en.wikipedia.org/wiki/Mean_absolute_error}
162
+ * @param {number[]} targets - An array of target numerical values.
163
+ * @param {number[]} outputs - An array of output values from the network.
164
+ * @returns {number} The mean absolute error.
165
+ * @throws {Error} If the target and output arrays have different lengths (implicitly via forEach).
166
+ */
167
+ static mae(targets: number[], outputs: number[]): number {
168
+ if (targets.length !== outputs.length) {
169
+ throw new Error('Target and output arrays must have the same length.');
170
+ }
171
+ let error = 0;
172
+
173
+ // Assumes targets and outputs have the same length.
174
+ outputs.forEach((output, outputIndex) => {
175
+ // Calculate the absolute difference for each sample.
176
+ error += Math.abs(targets[outputIndex] - output);
177
+ });
178
+
179
+ // Return the average absolute error.
180
+ return error / outputs.length;
181
+ }
182
+
183
+ /**
184
+ * Calculates the Mean Absolute Percentage Error (MAPE).
185
+ *
186
+ * MAPE expresses the error as a percentage of the actual value. It can be useful
187
+ * for understanding the error relative to the magnitude of the target values.
188
+ * However, it has limitations: it's undefined when the target value is zero and
189
+ * can be skewed by target values close to zero.
190
+ *
191
+ * @see {@link https://en.wikipedia.org/wiki/Mean_absolute_percentage_error}
192
+ * @param {number[]} targets - An array of target numerical values. Should not contain zeros for standard MAPE.
193
+ * @param {number[]} outputs - An array of output values from the network.
194
+ * @returns {number} The mean absolute percentage error, expressed as a proportion (e.g., 0.1 for 10%).
195
+ * @throws {Error} If the target and output arrays have different lengths (implicitly via forEach).
196
+ */
197
+ static mape(targets: number[], outputs: number[]): number {
198
+ if (targets.length !== outputs.length) {
199
+ throw new Error('Target and output arrays must have the same length.');
200
+ }
201
+ let error = 0;
202
+ const epsilon = PROB_EPSILON; // Small constant to avoid division by zero or near-zero target values.
203
+
204
+ // Assumes targets and outputs have the same length.
205
+ outputs.forEach((output, outputIndex) => {
206
+ const target = targets[outputIndex];
207
+ // Calculate the absolute percentage error for each sample.
208
+ // Use Math.max with epsilon to prevent division by zero.
209
+ error += Math.abs(
210
+ (target - output) / Math.max(Math.abs(target), epsilon)
211
+ );
212
+ });
213
+
214
+ // Return the average absolute percentage error (as a proportion).
215
+ // Multiply by 100 if a percentage value is desired.
216
+ return error / outputs.length;
217
+ }
218
+
219
+ /**
220
+ * Calculates the Mean Squared Logarithmic Error (MSLE).
221
+ *
222
+ * MSLE is often used in regression tasks where the target values span a large range
223
+ * or when penalizing under-predictions more than over-predictions is desired.
224
+ * It measures the squared difference between the logarithms of the predicted and actual values.
225
+ * Uses `log(1 + x)` instead of `log(x)` for numerical stability and to handle inputs of 0.
226
+ * Assumes both targets and outputs are non-negative.
227
+ *
228
+ * @see {@link https://peltarion.com/knowledge-center/documentation/modeling-view/build-an-ai-model/loss-functions/mean-squared-logarithmic-error}
229
+ * @param {number[]} targets - An array of target numerical values (assumed >= 0).
230
+ * @param {number[]} outputs - An array of output values from the network (assumed >= 0).
231
+ * @returns {number} The mean squared logarithmic error.
232
+ * @throws {Error} If the target and output arrays have different lengths (implicitly via forEach).
233
+ */
234
+ static msle(targets: number[], outputs: number[]): number {
235
+ if (targets.length !== outputs.length) {
236
+ throw new Error('Target and output arrays must have the same length.');
237
+ }
238
+ let error = 0;
239
+
240
+ // Assumes targets and outputs have the same length.
241
+ outputs.forEach((output, outputIndex) => {
242
+ const target = targets[outputIndex];
243
+ // Ensure inputs are non-negative before adding 1 for the logarithm.
244
+ // Using log(1 + x) avoids issues with log(0) and handles values >= 0.
245
+ const logTarget = Math.log(Math.max(target, 0) + 1);
246
+ const logOutput = Math.log(Math.max(output, 0) + 1);
247
+ // Calculate the squared difference of the logarithms.
248
+ error += Math.pow(logTarget - logOutput, 2);
249
+ });
250
+
251
+ // Return the average squared logarithmic error.
252
+ return error / outputs.length;
253
+ }
254
+
255
+ /**
256
+ * Calculates the Mean Hinge loss, primarily used for "maximum-margin" classification,
257
+ * most notably for Support Vector Machines (SVMs).
258
+ *
259
+ * Hinge loss is used for training classifiers. It penalizes predictions that are
260
+ * not only incorrect but also those that are correct but not confident (i.e., close to the decision boundary).
261
+ * Assumes target values are encoded as -1 or 1.
262
+ *
263
+ * @see {@link https://en.wikipedia.org/wiki/Hinge_loss}
264
+ * @param {number[]} targets - An array of target values, expected to be -1 or 1.
265
+ * @param {number[]} outputs - An array of output values from the network (raw scores, not necessarily probabilities).
266
+ * @returns {number} The mean hinge loss.
267
+ * @throws {Error} If the target and output arrays have different lengths (implicitly via forEach).
268
+ */
269
+ static hinge(targets: number[], outputs: number[]): number {
270
+ if (targets.length !== outputs.length) {
271
+ throw new Error('Target and output arrays must have the same length.');
272
+ }
273
+ let error = 0;
274
+
275
+ // Assumes targets and outputs have the same length.
276
+ outputs.forEach((output, outputIndex) => {
277
+ const target = targets[outputIndex]; // Should be -1 or 1 for standard hinge loss.
278
+ // The term `target * output` should be >= 1 for a correct and confident prediction.
279
+ // Loss is incurred if `target * output < 1`.
280
+ error += Math.max(0, 1 - target * output);
281
+ });
282
+
283
+ // Return the average hinge loss.
284
+ return error / outputs.length;
285
+ }
286
+
287
+ /**
288
+ * Calculates the Focal Loss, which is useful for addressing class imbalance in classification tasks.
289
+ * Focal loss down-weights easy examples and focuses training on hard negatives.
290
+ *
291
+ * @see https://arxiv.org/abs/1708.02002
292
+ * @param {number[]} targets - Array of target values (0 or 1 for binary, or probabilities for soft labels).
293
+ * @param {number[]} outputs - Array of predicted probabilities (between 0 and 1).
294
+ * @param {number} gamma - Focusing parameter (default 2).
295
+ * @param {number} alpha - Balancing parameter (default 0.25).
296
+ * @returns {number} The mean focal loss.
297
+ */
298
+ static focalLoss(
299
+ targets: number[],
300
+ outputs: number[],
301
+ gamma: number = 2,
302
+ alpha: number = 0.25
303
+ ): number {
304
+ let error = 0;
305
+ const epsilon = PROB_EPSILON;
306
+ if (targets.length !== outputs.length) {
307
+ throw new Error('Target and output arrays must have the same length.');
308
+ }
309
+ for (let i = 0; i < outputs.length; i++) {
310
+ const t = targets[i];
311
+ const p = Math.max(epsilon, Math.min(1 - epsilon, outputs[i]));
312
+ const pt = t === 1 ? p : 1 - p;
313
+ const a = t === 1 ? alpha : 1 - alpha;
314
+ error += -a * Math.pow(1 - pt, gamma) * Math.log(pt);
315
+ }
316
+ return error / outputs.length;
317
+ }
318
+
319
+ /**
320
+ * Calculates the Cross Entropy with Label Smoothing.
321
+ * Label smoothing prevents the model from becoming overconfident by softening the targets.
322
+ *
323
+ * @see https://arxiv.org/abs/1512.00567
324
+ * @param {number[]} targets - Array of target values (0 or 1 for binary, or probabilities for soft labels).
325
+ * @param {number[]} outputs - Array of predicted probabilities (between 0 and 1).
326
+ * @param {number} smoothing - Smoothing factor (between 0 and 1, e.g., 0.1).
327
+ * @returns {number} The mean cross-entropy loss with label smoothing.
328
+ */
329
+ static labelSmoothing(
330
+ targets: number[],
331
+ outputs: number[],
332
+ smoothing: number = 0.1
333
+ ): number {
334
+ let error = 0;
335
+ const epsilon = PROB_EPSILON;
336
+ if (targets.length !== outputs.length) {
337
+ throw new Error('Target and output arrays must have the same length.');
338
+ }
339
+ for (let i = 0; i < outputs.length; i++) {
340
+ // Smooth the target: t_smooth = t * (1 - smoothing) + 0.5 * smoothing
341
+ const t = targets[i] * (1 - smoothing) + 0.5 * smoothing;
342
+ const p = Math.max(epsilon, Math.min(1 - epsilon, outputs[i]));
343
+ error -= t * Math.log(p) + (1 - t) * Math.log(1 - p);
344
+ }
345
+ return error / outputs.length;
346
+ }
347
+ }
@@ -0,0 +1,63 @@
1
+ /**
2
+ * Crossover methods for genetic algorithms.
3
+ *
4
+ * These methods implement the crossover strategies described in the Instinct algorithm,
5
+ * enabling the creation of offspring with unique combinations of parent traits.
6
+ *
7
+ * @see Instinct Algorithm - Section 2 Crossover
8
+ * @see {@link https://medium.com/data-science/neuro-evolution-on-steroids-82bd14ddc2f6}
9
+ * @see {@link https://en.wikipedia.org/wiki/Crossover_(genetic_algorithm)}
10
+ */
11
+ export const crossover = {
12
+ /**
13
+ * Single-point crossover.
14
+ * A single crossover point is selected, and genes are exchanged between parents up to this point.
15
+ * This method is particularly useful for binary-encoded genomes.
16
+ *
17
+ * @property {string} name - The name of the crossover method.
18
+ * @property {number[]} config - Configuration for the crossover point.
19
+ * @see {@link https://en.wikipedia.org/wiki/Crossover_(genetic_algorithm)#One-point_crossover}
20
+ */
21
+ SINGLE_POINT: {
22
+ name: 'SINGLE_POINT',
23
+ config: [0.4],
24
+ },
25
+
26
+ /**
27
+ * Two-point crossover.
28
+ * Two crossover points are selected, and genes are exchanged between parents between these points.
29
+ * This method is an extension of single-point crossover and is often used for more complex genomes.
30
+ *
31
+ * @property {string} name - The name of the crossover method.
32
+ * @property {number[]} config - Configuration for the two crossover points.
33
+ * @see {@link https://en.wikipedia.org/wiki/Crossover_(genetic_algorithm)#Two-point_and_k-point_crossover}
34
+ */
35
+ TWO_POINT: {
36
+ name: 'TWO_POINT',
37
+ config: [0.4, 0.9],
38
+ },
39
+
40
+ /**
41
+ * Uniform crossover.
42
+ * Each gene is selected randomly from one of the parents with equal probability.
43
+ * This method provides a high level of genetic diversity in the offspring.
44
+ *
45
+ * @property {string} name - The name of the crossover method.
46
+ * @see {@link https://en.wikipedia.org/wiki/Crossover_(genetic_algorithm)#Uniform_crossover}
47
+ */
48
+ UNIFORM: {
49
+ name: 'UNIFORM',
50
+ },
51
+
52
+ /**
53
+ * Average crossover.
54
+ * The offspring's genes are the average of the parents' genes.
55
+ * This method is particularly useful for real-valued genomes.
56
+ *
57
+ * @property {string} name - The name of the crossover method.
58
+ * @see {@link https://en.wikipedia.org/wiki/Crossover_(genetic_algorithm)#Arithmetic_recombination}
59
+ */
60
+ AVERAGE: {
61
+ name: 'AVERAGE',
62
+ },
63
+ };
@@ -0,0 +1,43 @@
1
+ /**
2
+ * Defines different methods for gating connections between neurons or groups of neurons.
3
+ *
4
+ * Gating mechanisms dynamically control the flow of information through connections
5
+ * in a neural network. This allows the network to selectively route information,
6
+ * enabling more complex computations, memory functions, and adaptive behaviors.
7
+ * These mechanisms are inspired by biological neural processes where certain neurons
8
+ * can modulate the activity of others. Gating is particularly crucial in recurrent
9
+ * neural networks (RNNs) for managing information persistence over time.
10
+ *
11
+ * @see {@link https://en.wikipedia.org/wiki/Artificial_neural_network#Gating_mechanisms}
12
+ */
13
+ export const gating = {
14
+ /**
15
+ * Output Gating: The gating neuron(s) control the activation flowing *out*
16
+ * of the connection's target neuron(s). The connection's weight remains static,
17
+ * but the output signal from the target neuron is modulated by the gater's state.
18
+ * @property {string} name - Identifier for the output gating method.
19
+ */
20
+ OUTPUT: {
21
+ name: 'OUTPUT',
22
+ },
23
+
24
+ /**
25
+ * Input Gating: The gating neuron(s) control the activation flowing *into*
26
+ * the connection's target neuron(s). The connection effectively transmits
27
+ * `connection_weight * source_activation * gater_activation` to the target neuron.
28
+ * @property {string} name - Identifier for the input gating method.
29
+ */
30
+ INPUT: {
31
+ name: 'INPUT',
32
+ },
33
+
34
+ /**
35
+ * Self Gating: The gating neuron(s) directly modulate the *weight* or strength
36
+ * of the connection itself. The connection's effective weight becomes dynamic,
37
+ * influenced by the gater's activation state (`effective_weight = connection_weight * gater_activation`).
38
+ * @property {string} name - Identifier for the self-gating method.
39
+ */
40
+ SELF: {
41
+ name: 'SELF',
42
+ },
43
+ };
@@ -0,0 +1,8 @@
1
/**
 * Barrel module aggregating the neuro-evolution method collections so they
 * can be imported from a single path.
 */
// Cost (loss) functions used during training.
export { default as Cost } from './cost';
// Learning-rate schedule policies.
export { default as Rate } from './rate';
// Activation (squash) functions.
export { default as Activation } from './activation';
// Connection-gating methods (OUTPUT / INPUT / SELF).
export { gating } from './gating';
// Mutation operators for evolving network topology and weights.
export { mutation } from './mutation';
// Parent-selection strategies for the genetic algorithm.
export { selection } from './selection';
// Crossover (recombination) strategies.
export { crossover } from './crossover';
// Group-to-group connection methods.
export { default as groupConnection } from './connection';