@reicek/neataptic-ts 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (272) hide show
  1. package/.github/ISSUE_TEMPLATE/bug_report.md +33 -0
  2. package/.github/ISSUE_TEMPLATE/feature_request.md +27 -0
  3. package/.github/PULL_REQUEST_TEMPLATE.md +28 -0
  4. package/.github/workflows/ci.yml +41 -0
  5. package/.github/workflows/deploy-pages.yml +29 -0
  6. package/.github/workflows/manual_release_pipeline.yml +62 -0
  7. package/.github/workflows/publish.yml +85 -0
  8. package/.github/workflows/release_dispatch.yml +38 -0
  9. package/.travis.yml +5 -0
  10. package/CONTRIBUTING.md +92 -0
  11. package/LICENSE +24 -0
  12. package/ONNX_EXPORT.md +87 -0
  13. package/README.md +1173 -0
  14. package/RELEASE.md +54 -0
  15. package/dist-docs/package.json +1 -0
  16. package/dist-docs/scripts/generate-docs.d.ts +2 -0
  17. package/dist-docs/scripts/generate-docs.d.ts.map +1 -0
  18. package/dist-docs/scripts/generate-docs.js +536 -0
  19. package/dist-docs/scripts/generate-docs.js.map +1 -0
  20. package/dist-docs/scripts/render-docs-html.d.ts +2 -0
  21. package/dist-docs/scripts/render-docs-html.d.ts.map +1 -0
  22. package/dist-docs/scripts/render-docs-html.js +148 -0
  23. package/dist-docs/scripts/render-docs-html.js.map +1 -0
  24. package/docs/FOLDERS.md +14 -0
  25. package/docs/README.md +1173 -0
  26. package/docs/architecture/README.md +1391 -0
  27. package/docs/architecture/index.html +938 -0
  28. package/docs/architecture/network/README.md +1210 -0
  29. package/docs/architecture/network/index.html +908 -0
  30. package/docs/assets/ascii-maze.bundle.js +16542 -0
  31. package/docs/assets/ascii-maze.bundle.js.map +7 -0
  32. package/docs/index.html +1419 -0
  33. package/docs/methods/README.md +670 -0
  34. package/docs/methods/index.html +477 -0
  35. package/docs/multithreading/README.md +274 -0
  36. package/docs/multithreading/index.html +215 -0
  37. package/docs/multithreading/workers/README.md +23 -0
  38. package/docs/multithreading/workers/browser/README.md +39 -0
  39. package/docs/multithreading/workers/browser/index.html +70 -0
  40. package/docs/multithreading/workers/index.html +57 -0
  41. package/docs/multithreading/workers/node/README.md +33 -0
  42. package/docs/multithreading/workers/node/index.html +66 -0
  43. package/docs/neat/README.md +1284 -0
  44. package/docs/neat/index.html +906 -0
  45. package/docs/src/README.md +2659 -0
  46. package/docs/src/index.html +1579 -0
  47. package/jest.config.ts +32 -0
  48. package/package.json +99 -0
  49. package/plans/HyperMorphoNEAT.md +293 -0
  50. package/plans/ONNX_EXPORT_PLAN.md +46 -0
  51. package/scripts/generate-docs.ts +486 -0
  52. package/scripts/render-docs-html.ts +138 -0
  53. package/scripts/types.d.ts +2 -0
  54. package/src/README.md +2659 -0
  55. package/src/architecture/README.md +1391 -0
  56. package/src/architecture/activationArrayPool.ts +135 -0
  57. package/src/architecture/architect.ts +635 -0
  58. package/src/architecture/connection.ts +148 -0
  59. package/src/architecture/group.ts +406 -0
  60. package/src/architecture/layer.ts +804 -0
  61. package/src/architecture/network/README.md +1210 -0
  62. package/src/architecture/network/network.activate.ts +223 -0
  63. package/src/architecture/network/network.connect.ts +157 -0
  64. package/src/architecture/network/network.deterministic.ts +167 -0
  65. package/src/architecture/network/network.evolve.ts +426 -0
  66. package/src/architecture/network/network.gating.ts +186 -0
  67. package/src/architecture/network/network.genetic.ts +247 -0
  68. package/src/architecture/network/network.mutate.ts +624 -0
  69. package/src/architecture/network/network.onnx.ts +463 -0
  70. package/src/architecture/network/network.prune.ts +216 -0
  71. package/src/architecture/network/network.remove.ts +96 -0
  72. package/src/architecture/network/network.serialize.ts +309 -0
  73. package/src/architecture/network/network.slab.ts +262 -0
  74. package/src/architecture/network/network.standalone.ts +246 -0
  75. package/src/architecture/network/network.stats.ts +59 -0
  76. package/src/architecture/network/network.topology.ts +86 -0
  77. package/src/architecture/network/network.training.ts +1278 -0
  78. package/src/architecture/network.ts +1302 -0
  79. package/src/architecture/node.ts +1288 -0
  80. package/src/architecture/onnx.ts +3 -0
  81. package/src/config.ts +83 -0
  82. package/src/methods/README.md +670 -0
  83. package/src/methods/activation.ts +372 -0
  84. package/src/methods/connection.ts +31 -0
  85. package/src/methods/cost.ts +347 -0
  86. package/src/methods/crossover.ts +63 -0
  87. package/src/methods/gating.ts +43 -0
  88. package/src/methods/methods.ts +8 -0
  89. package/src/methods/mutation.ts +300 -0
  90. package/src/methods/rate.ts +257 -0
  91. package/src/methods/selection.ts +65 -0
  92. package/src/multithreading/README.md +274 -0
  93. package/src/multithreading/multi.ts +339 -0
  94. package/src/multithreading/workers/README.md +23 -0
  95. package/src/multithreading/workers/browser/README.md +39 -0
  96. package/src/multithreading/workers/browser/testworker.ts +99 -0
  97. package/src/multithreading/workers/node/README.md +33 -0
  98. package/src/multithreading/workers/node/testworker.ts +72 -0
  99. package/src/multithreading/workers/node/worker.ts +70 -0
  100. package/src/multithreading/workers/workers.ts +22 -0
  101. package/src/neat/README.md +1284 -0
  102. package/src/neat/neat.adaptive.ts +544 -0
  103. package/src/neat/neat.compat.ts +164 -0
  104. package/src/neat/neat.constants.ts +20 -0
  105. package/src/neat/neat.diversity.ts +217 -0
  106. package/src/neat/neat.evaluate.ts +328 -0
  107. package/src/neat/neat.evolve.ts +1026 -0
  108. package/src/neat/neat.export.ts +249 -0
  109. package/src/neat/neat.helpers.ts +235 -0
  110. package/src/neat/neat.lineage.ts +220 -0
  111. package/src/neat/neat.multiobjective.ts +260 -0
  112. package/src/neat/neat.mutation.ts +718 -0
  113. package/src/neat/neat.objectives.ts +157 -0
  114. package/src/neat/neat.pruning.ts +190 -0
  115. package/src/neat/neat.selection.ts +269 -0
  116. package/src/neat/neat.speciation.ts +460 -0
  117. package/src/neat/neat.species.ts +151 -0
  118. package/src/neat/neat.telemetry.exports.ts +469 -0
  119. package/src/neat/neat.telemetry.ts +933 -0
  120. package/src/neat/neat.types.ts +275 -0
  121. package/src/neat.ts +1042 -0
  122. package/src/neataptic.ts +10 -0
  123. package/test/architecture/activationArrayPool.capacity.test.ts +19 -0
  124. package/test/architecture/activationArrayPool.test.ts +46 -0
  125. package/test/architecture/connection.test.ts +290 -0
  126. package/test/architecture/group.test.ts +950 -0
  127. package/test/architecture/layer.test.ts +1535 -0
  128. package/test/architecture/network.pruning.test.ts +65 -0
  129. package/test/architecture/node.test.ts +1602 -0
  130. package/test/examples/asciiMaze/asciiMaze.e2e.test.ts +499 -0
  131. package/test/examples/asciiMaze/asciiMaze.ts +41 -0
  132. package/test/examples/asciiMaze/browser-entry.ts +164 -0
  133. package/test/examples/asciiMaze/browserLogger.ts +221 -0
  134. package/test/examples/asciiMaze/browserTerminalUtility.ts +48 -0
  135. package/test/examples/asciiMaze/colors.ts +119 -0
  136. package/test/examples/asciiMaze/dashboardManager.ts +968 -0
  137. package/test/examples/asciiMaze/evolutionEngine.ts +1248 -0
  138. package/test/examples/asciiMaze/fitness.ts +136 -0
  139. package/test/examples/asciiMaze/index.html +128 -0
  140. package/test/examples/asciiMaze/index.ts +26 -0
  141. package/test/examples/asciiMaze/interfaces.ts +235 -0
  142. package/test/examples/asciiMaze/mazeMovement.ts +996 -0
  143. package/test/examples/asciiMaze/mazeUtils.ts +278 -0
  144. package/test/examples/asciiMaze/mazeVision.ts +402 -0
  145. package/test/examples/asciiMaze/mazeVisualization.ts +585 -0
  146. package/test/examples/asciiMaze/mazes.ts +245 -0
  147. package/test/examples/asciiMaze/networkRefinement.ts +76 -0
  148. package/test/examples/asciiMaze/networkVisualization.ts +901 -0
  149. package/test/examples/asciiMaze/terminalUtility.ts +73 -0
  150. package/test/methods/activation.test.ts +1142 -0
  151. package/test/methods/connection.test.ts +146 -0
  152. package/test/methods/cost.test.ts +1123 -0
  153. package/test/methods/crossover.test.ts +202 -0
  154. package/test/methods/gating.test.ts +144 -0
  155. package/test/methods/mutation.test.ts +451 -0
  156. package/test/methods/optimizers.advanced.test.ts +80 -0
  157. package/test/methods/optimizers.behavior.test.ts +105 -0
  158. package/test/methods/optimizers.formula.test.ts +89 -0
  159. package/test/methods/rate.cosineWarmRestarts.test.ts +44 -0
  160. package/test/methods/rate.linearWarmupDecay.test.ts +41 -0
  161. package/test/methods/rate.reduceOnPlateau.test.ts +45 -0
  162. package/test/methods/rate.test.ts +684 -0
  163. package/test/methods/selection.test.ts +245 -0
  164. package/test/multithreading/activations.functions.test.ts +54 -0
  165. package/test/multithreading/multi.test.ts +290 -0
  166. package/test/multithreading/worker.node.process.test.ts +39 -0
  167. package/test/multithreading/workers.coverage.test.ts +36 -0
  168. package/test/multithreading/workers.dynamic.import.test.ts +8 -0
  169. package/test/neat/neat.adaptive.complexityBudget.test.ts +34 -0
  170. package/test/neat/neat.adaptive.criterion.complexity.test.ts +50 -0
  171. package/test/neat/neat.adaptive.mutation.strategy.test.ts +37 -0
  172. package/test/neat/neat.adaptive.operator.decay.test.ts +31 -0
  173. package/test/neat/neat.adaptive.phasedComplexity.test.ts +25 -0
  174. package/test/neat/neat.adaptive.pruning.test.ts +25 -0
  175. package/test/neat/neat.adaptive.targetSpecies.test.ts +43 -0
  176. package/test/neat/neat.additional.coverage.test.ts +126 -0
  177. package/test/neat/neat.advanced.enhancements.test.ts +85 -0
  178. package/test/neat/neat.advanced.test.ts +589 -0
  179. package/test/neat/neat.diversity.autocompat.test.ts +47 -0
  180. package/test/neat/neat.diversity.metrics.test.ts +21 -0
  181. package/test/neat/neat.diversity.stats.test.ts +44 -0
  182. package/test/neat/neat.enhancements.test.ts +79 -0
  183. package/test/neat/neat.entropy.ancestorAdaptive.test.ts +133 -0
  184. package/test/neat/neat.entropy.compat.csv.test.ts +108 -0
  185. package/test/neat/neat.evolution.pruning.test.ts +39 -0
  186. package/test/neat/neat.fastmode.autotune.test.ts +42 -0
  187. package/test/neat/neat.innovation.test.ts +134 -0
  188. package/test/neat/neat.lineage.antibreeding.test.ts +35 -0
  189. package/test/neat/neat.lineage.entropy.test.ts +56 -0
  190. package/test/neat/neat.lineage.inbreeding.test.ts +49 -0
  191. package/test/neat/neat.lineage.pressure.test.ts +29 -0
  192. package/test/neat/neat.multiobjective.adaptive.test.ts +57 -0
  193. package/test/neat/neat.multiobjective.dynamic.schedule.test.ts +46 -0
  194. package/test/neat/neat.multiobjective.dynamic.test.ts +31 -0
  195. package/test/neat/neat.multiobjective.fastsort.delegation.test.ts +51 -0
  196. package/test/neat/neat.multiobjective.prune.test.ts +39 -0
  197. package/test/neat/neat.multiobjective.test.ts +21 -0
  198. package/test/neat/neat.mutation.undefined.pool.test.ts +24 -0
  199. package/test/neat/neat.objective.events.test.ts +26 -0
  200. package/test/neat/neat.objective.importance.test.ts +21 -0
  201. package/test/neat/neat.objective.lifetimes.test.ts +33 -0
  202. package/test/neat/neat.offspring.allocation.test.ts +22 -0
  203. package/test/neat/neat.operator.bandit.test.ts +17 -0
  204. package/test/neat/neat.operator.phases.test.ts +38 -0
  205. package/test/neat/neat.pruneInactive.behavior.test.ts +54 -0
  206. package/test/neat/neat.reenable.adaptation.test.ts +18 -0
  207. package/test/neat/neat.rng.state.test.ts +22 -0
  208. package/test/neat/neat.spawn.add.test.ts +123 -0
  209. package/test/neat/neat.speciation.test.ts +96 -0
  210. package/test/neat/neat.species.allocation.telemetry.test.ts +26 -0
  211. package/test/neat/neat.species.history.csv.test.ts +24 -0
  212. package/test/neat/neat.telemetry.advanced.test.ts +226 -0
  213. package/test/neat/neat.telemetry.csv.lineage.test.ts +19 -0
  214. package/test/neat/neat.telemetry.parity.test.ts +42 -0
  215. package/test/neat/neat.telemetry.stream.test.ts +19 -0
  216. package/test/neat/neat.telemetry.test.ts +16 -0
  217. package/test/neat/neat.test.ts +422 -0
  218. package/test/neat/neat.utilities.test.ts +44 -0
  219. package/test/network/__suppress_console.ts +9 -0
  220. package/test/network/acyclic.topoorder.test.ts +17 -0
  221. package/test/network/checkpoint.metricshook.test.ts +36 -0
  222. package/test/network/error.handling.test.ts +581 -0
  223. package/test/network/evolution.test.ts +285 -0
  224. package/test/network/genetic.test.ts +208 -0
  225. package/test/network/learning.capability.test.ts +244 -0
  226. package/test/network/mutation.effects.test.ts +492 -0
  227. package/test/network/network.activate.test.ts +115 -0
  228. package/test/network/network.activateBatch.test.ts +30 -0
  229. package/test/network/network.deterministic.test.ts +64 -0
  230. package/test/network/network.evolve.branches.test.ts +75 -0
  231. package/test/network/network.evolve.multithread.branches.test.ts +83 -0
  232. package/test/network/network.evolve.test.ts +100 -0
  233. package/test/network/network.gating.removal.test.ts +93 -0
  234. package/test/network/network.mutate.additional.test.ts +145 -0
  235. package/test/network/network.mutate.edgecases.test.ts +101 -0
  236. package/test/network/network.mutate.test.ts +101 -0
  237. package/test/network/network.prune.earlyexit.test.ts +38 -0
  238. package/test/network/network.remove.errors.test.ts +45 -0
  239. package/test/network/network.slab.fallbacks.test.ts +22 -0
  240. package/test/network/network.stats.test.ts +45 -0
  241. package/test/network/network.training.advanced.test.ts +149 -0
  242. package/test/network/network.training.basic.test.ts +228 -0
  243. package/test/network/network.training.helpers.test.ts +183 -0
  244. package/test/network/onnx.export.test.ts +310 -0
  245. package/test/network/onnx.import.test.ts +129 -0
  246. package/test/network/pruning.topology.test.ts +282 -0
  247. package/test/network/regularization.determinism.test.ts +83 -0
  248. package/test/network/regularization.dropconnect.test.ts +17 -0
  249. package/test/network/regularization.dropconnect.validation.test.ts +18 -0
  250. package/test/network/regularization.stochasticdepth.test.ts +27 -0
  251. package/test/network/regularization.test.ts +843 -0
  252. package/test/network/regularization.weightnoise.test.ts +30 -0
  253. package/test/network/setupTests.ts +2 -0
  254. package/test/network/standalone.test.ts +332 -0
  255. package/test/network/structure.serialization.test.ts +660 -0
  256. package/test/training/training.determinism.mixed-precision.test.ts +134 -0
  257. package/test/training/training.earlystopping.test.ts +91 -0
  258. package/test/training/training.edge-cases.test.ts +91 -0
  259. package/test/training/training.extensions.test.ts +47 -0
  260. package/test/training/training.gradient.features.test.ts +110 -0
  261. package/test/training/training.gradient.refinements.test.ts +170 -0
  262. package/test/training/training.gradient.separate-bias.test.ts +41 -0
  263. package/test/training/training.optimizer.test.ts +48 -0
  264. package/test/training/training.plateau.smoothing.test.ts +58 -0
  265. package/test/training/training.smoothing.types.test.ts +174 -0
  266. package/test/training/training.train.options.coverage.test.ts +52 -0
  267. package/test/utils/console-helper.ts +76 -0
  268. package/test/utils/jest-setup.ts +60 -0
  269. package/test/utils/test-helpers.ts +175 -0
  270. package/tsconfig.docs.json +12 -0
  271. package/tsconfig.json +21 -0
  272. package/webpack.config.js +49 -0
@@ -0,0 +1,1391 @@
1
+ # architecture
2
+
3
+ ## architecture/activationArrayPool.ts
4
+
5
+ ### ActivationArray
6
+
7
+ Allowed activation array shapes for pooling.
8
+ - number[]: default JS array
9
+ - Float32Array: compact typed array when float32 mode is enabled
10
+ - Float64Array: supported for compatibility with typed math paths
11
+
12
+ ### activationArrayPool
13
+
14
+ ### ActivationArrayPool
15
+
16
+ A size-bucketed pool of activation arrays.
17
+
18
+ Buckets map array length -> stack of arrays. Acquire pops and zero-fills, or
19
+ allocates a new array when empty. Release pushes back up to a configurable
20
+ per-bucket cap to avoid unbounded growth.
21
+
22
+ Note: not thread-safe; intended for typical single-threaded JS execution.
23
+
24
+ ## architecture/architect.ts
25
+
26
+ ### architect
27
+
28
+ Provides static methods for constructing various predefined neural network architectures.
29
+
30
+ The Architect class simplifies the creation of common network types like Multi-Layer Perceptrons (MLPs),
31
+ Long Short-Term Memory (LSTM) networks, Gated Recurrent Units (GRUs), and more complex structures
32
+ inspired by neuro-evolutionary algorithms. It leverages the underlying `Layer`, `Group`, and `Node`
33
+ components to build interconnected `Network` objects.
34
+
35
+ Methods often utilize helper functions from `Layer` (e.g., `Layer.dense`, `Layer.lstm`) and
36
+ connection strategies from `methods.groupConnection`.
37
+
38
+ ### Architect
39
+
40
+ Provides static methods for constructing various predefined neural network architectures.
41
+
42
+ The Architect class simplifies the creation of common network types like Multi-Layer Perceptrons (MLPs),
43
+ Long Short-Term Memory (LSTM) networks, Gated Recurrent Units (GRUs), and more complex structures
44
+ inspired by neuro-evolutionary algorithms. It leverages the underlying `Layer`, `Group`, and `Node`
45
+ components to build interconnected `Network` objects.
46
+
47
+ Methods often utilize helper functions from `Layer` (e.g., `Layer.dense`, `Layer.lstm`) and
48
+ connection strategies from `methods.groupConnection`.
49
+
50
+ ### default
51
+
52
+ #### construct
53
+
54
+ `(list: (import("D:/code-practice/NeatapticTS/src/architecture/node").default | import("D:/code-practice/NeatapticTS/src/architecture/layer").default | import("D:/code-practice/NeatapticTS/src/architecture/group").default)[]) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
55
+
56
+ Constructs a Network instance from an array of interconnected Layers, Groups, or Nodes.
57
+
58
+ This method processes the input list, extracts all unique nodes, identifies connections,
59
+ gates, and self-connections, and determines the network's input and output sizes based
60
+ on the `type` property ('input' or 'output') set on the nodes. It uses Sets internally
61
+ for efficient handling of unique elements during construction.
62
+
63
+ Parameters:
64
+ - `list` - - An array containing the building blocks (Nodes, Layers, Groups) of the network, assumed to be already interconnected.
65
+
66
+ Returns: A Network object representing the constructed architecture.
67
+
68
+ #### enforceMinimumHiddenLayerSizes
69
+
70
+ `(network: import("D:/code-practice/NeatapticTS/src/architecture/network").default) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
71
+
72
+ Enforces the minimum hidden layer size rule on a network.
73
+
74
+ This ensures that all hidden layers have at least min(input, output) + 1 nodes,
75
+ which is a common heuristic to ensure networks have adequate representation capacity.
76
+
77
+ Parameters:
78
+ - `network` - - The network to enforce minimum hidden layer sizes on
79
+
80
+ Returns: The same network with properly sized hidden layers
81
+
82
+ #### gru
83
+
84
+ `(layers: number[]) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
85
+
86
+ Creates a Gated Recurrent Unit (GRU) network.
87
+ GRUs are another type of recurrent neural network, similar to LSTMs but often simpler.
88
+ This constructor uses `Layer.gru` to create the core GRU blocks.
89
+
90
+ Parameters:
91
+ - `layers` - - A sequence of numbers representing the size (number of units) of each layer: input layer size, hidden GRU layer sizes..., output layer size. Must include at least input, one hidden, and output layer sizes.
92
+
93
+ Returns: The constructed GRU network.
94
+
95
+ #### hopfield
96
+
97
+ `(size: number) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
98
+
99
+ Creates a Hopfield network.
100
+ Hopfield networks are a form of recurrent neural network often used for associative memory tasks.
101
+ This implementation creates a simple, fully connected structure.
102
+
103
+ Parameters:
104
+ - `size` - - The number of nodes in the network (input and output layers will have this size).
105
+
106
+ Returns: The constructed Hopfield network.
107
+
108
+ #### lstm
109
+
110
+ `(layerArgs: (number | { inputToOutput?: boolean | undefined; })[]) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
111
+
112
+ Creates a Long Short-Term Memory (LSTM) network.
113
+ LSTMs are a type of recurrent neural network (RNN) capable of learning long-range dependencies.
114
+ This constructor uses `Layer.lstm` to create the core LSTM blocks.
115
+
116
+ Parameters:
117
+ - `layerArgs` - - A sequence of arguments defining the network structure:
118
+ - Numbers represent the size (number of units) of each layer: input layer size, hidden LSTM layer sizes..., output layer size.
119
+ - An optional configuration object can be provided as the last argument.
120
+ - `options` - - Configuration options (if passed as the last argument).
121
+
122
+ Returns: The constructed LSTM network.
123
+
124
+ #### narx
125
+
126
+ `(inputSize: number, hiddenLayers: number | number[], outputSize: number, previousInput: number, previousOutput: number) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
127
+
128
+ Creates a Nonlinear AutoRegressive network with eXogenous inputs (NARX).
129
+ NARX networks are recurrent networks often used for time series prediction.
130
+ They predict the next value of a time series based on previous values of the series
131
+ and previous values of external (exogenous) input series.
132
+
133
+ Parameters:
134
+ - `inputSize` - - The number of input nodes for the exogenous inputs at each time step.
135
+ - `hiddenLayers` - - The size of the hidden layer(s). Can be a single number for one hidden layer, or an array of numbers for multiple hidden layers. Use 0 or [] for no hidden layers.
136
+ - `outputSize` - - The number of output nodes (predicting the time series).
137
+ - `previousInput` - - The number of past time steps of the exogenous input to feed back into the network.
138
+ - `previousOutput` - - The number of past time steps of the network's own output to feed back into the network (autoregressive part).
139
+
140
+ Returns: The constructed NARX network.
141
+
142
+ #### perceptron
143
+
144
+ `(layers: number[]) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
145
+
146
+ Creates a standard Multi-Layer Perceptron (MLP) network.
147
+ An MLP consists of an input layer, one or more hidden layers, and an output layer,
148
+ fully connected layer by layer.
149
+
150
+ Parameters:
151
+ - `layers` - - A sequence of numbers representing the size (number of nodes) of each layer, starting with the input layer, followed by hidden layers, and ending with the output layer. Must include at least input, one hidden, and output layer sizes.
152
+
153
+ Returns: The constructed MLP network.
154
+
155
+ #### random
156
+
157
+ `(input: number, hidden: number, output: number, options: { connections?: number | undefined; backconnections?: number | undefined; selfconnections?: number | undefined; gates?: number | undefined; }) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
158
+
159
+ Creates a randomly structured network based on specified node counts and connection options.
160
+
161
+ This method allows for the generation of networks with a less rigid structure than MLPs.
162
+ It initializes a network with input and output nodes and then iteratively adds hidden nodes
163
+ and various types of connections (forward, backward, self) and gates using mutation methods.
164
+ This approach is inspired by neuro-evolution techniques where network topology evolves.
165
+
166
+ Parameters:
167
+ - `input` - - The number of input nodes.
168
+ - `hidden` - - The number of hidden nodes to add.
169
+ - `output` - - The number of output nodes.
170
+ - `options` - - Optional configuration for the network structure.
171
+
172
+ Returns: The constructed network with a randomized topology.
173
+
174
+ ## architecture/connection.ts
175
+
176
+ ### connection
177
+
178
+ ### default
179
+
180
+ #### acquire
181
+
182
+ `(from: import("D:/code-practice/NeatapticTS/src/architecture/node").default, to: import("D:/code-practice/NeatapticTS/src/architecture/node").default, weight: number | undefined) => import("D:/code-practice/NeatapticTS/src/architecture/connection").default`
183
+
184
+ Acquire a Connection from the pool or construct a new one. Ensures fresh innovation id.
185
+
186
+ #### innovationID
187
+
188
+ `(a: number, b: number) => number`
189
+
190
+ Generates a unique innovation ID for the connection.
191
+
192
+ The innovation ID is calculated using the Cantor pairing function, which maps two integers
193
+ (representing the source and target nodes) to a unique integer.
194
+
195
+ Parameters:
196
+ - `a` - - The ID of the source node.
197
+ - `b` - - The ID of the target node.
198
+
199
+ Returns: The innovation ID based on the Cantor pairing function.
200
+
201
+ #### release
202
+
203
+ `(conn: import("D:/code-practice/NeatapticTS/src/architecture/connection").default) => void`
204
+
205
+ Return a Connection to the pool for reuse.
206
+
207
+ #### toJSON
208
+
209
+ `() => any`
210
+
211
+ Converts the connection to a JSON object for serialization.
212
+
213
+ Returns: A JSON representation of the connection.
214
+
215
+ ## architecture/group.ts
216
+
217
+ ### group
218
+
219
+ Represents a collection of nodes functioning as a single unit within a network architecture.
220
+ Groups facilitate operations like collective activation, propagation, and connection management.
221
+
222
+ ### Group
223
+
224
+ Represents a collection of nodes functioning as a single unit within a network architecture.
225
+ Groups facilitate operations like collective activation, propagation, and connection management.
226
+
227
+ ### default
228
+
229
+ #### activate
230
+
231
+ `(value: number[] | undefined) => number[]`
232
+
233
+ Activates all nodes in the group. If input values are provided, they are assigned
234
+ sequentially to the nodes before activation. Otherwise, nodes activate based on their
235
+ existing states and incoming connections.
236
+
237
+ Parameters:
238
+ - `value` - - An optional array of input values. If provided, its length must match the number of nodes in the group.
239
+
240
+ Returns: An array containing the activation value of each node in the group, in order.
241
+
242
+ #### clear
243
+
244
+ `() => void`
245
+
246
+ Resets the state of all nodes in the group. This typically involves clearing
247
+ activation values, state, and propagated errors, preparing the group for a new input pattern,
248
+ especially relevant in recurrent networks or sequence processing.
249
+
250
+ #### connect
251
+
252
+ `(target: import("D:/code-practice/NeatapticTS/src/architecture/node").default | import("D:/code-practice/NeatapticTS/src/architecture/layer").default | import("D:/code-practice/NeatapticTS/src/architecture/group").default, method: any, weight: number | undefined) => any[]`
253
+
254
+ Establishes connections from all nodes in this group to a target Group, Layer, or Node.
255
+ The connection pattern (e.g., all-to-all, one-to-one) can be specified.
256
+
257
+ Parameters:
258
+ - `target` - - The destination entity (Group, Layer, or Node) to connect to.
259
+ - `method` - - The connection method/type (e.g., `methods.groupConnection.ALL_TO_ALL`, `methods.groupConnection.ONE_TO_ONE`). Defaults depend on the target type and whether it's the same group.
260
+ - `weight` - - An optional fixed weight to assign to all created connections. If not provided, weights might be initialized randomly or based on node defaults.
261
+
262
+ Returns: An array containing all the connection objects created. Consider using a more specific type like `Connection[]`.
263
+
264
+ #### connections
265
+
266
+ Stores connection information related to this group.
267
+ `in`: Connections coming into any node in this group from outside.
268
+ `out`: Connections going out from any node in this group to outside.
269
+ `self`: Connections between nodes within this same group (e.g., in ONE_TO_ONE connections).
270
+
271
+ #### disconnect
272
+
273
+ `(target: import("D:/code-practice/NeatapticTS/src/architecture/node").default | import("D:/code-practice/NeatapticTS/src/architecture/group").default, twosided: boolean) => void`
274
+
275
+ Removes connections between nodes in this group and a target Group or Node.
276
+
277
+ Parameters:
278
+ - `target` - - The Group or Node to disconnect from.
279
+ - `twosided` - - If true, also removes connections originating from the `target` and ending in this group. Defaults to false (only removes connections from this group to the target).
280
+
281
+ #### gate
282
+
283
+ `(connections: any, method: any) => void`
284
+
285
+ Configures nodes within this group to act as gates for the specified connection(s).
286
+ Gating allows the output of a node in this group to modulate the flow of signal through the gated connection.
287
+
288
+ Parameters:
289
+ - `` - - A single connection object or an array of connection objects to be gated. Consider using a more specific type like `Connection | Connection[]`.
290
+ - `` - - The gating mechanism to use (e.g., `methods.gating.INPUT`, `methods.gating.OUTPUT`, `methods.gating.SELF`). Specifies which part of the connection is influenced by the gater node.
291
+
292
+ #### nodes
293
+
294
+ An array holding all the nodes within this group.
295
+
296
+ #### propagate
297
+
298
+ `(rate: number, momentum: number, target: number[] | undefined) => void`
299
+
300
+ Propagates the error backward through all nodes in the group. If target values are provided,
301
+ the error is calculated against these targets (typically for output layers). Otherwise,
302
+ the error is calculated based on the error propagated from subsequent layers/nodes.
303
+
304
+ Parameters:
305
+ - `rate` - - The learning rate to apply during weight updates.
306
+ - `momentum` - - The momentum factor to apply during weight updates.
307
+ - `target` - - Optional target values for error calculation. If provided, its length must match the number of nodes.
308
+
309
+ #### set
310
+
311
+ `(values: { bias?: number | undefined; squash?: any; type?: string | undefined; }) => void`
312
+
313
+ Sets specific properties (like bias, squash function, or type) for all nodes within the group.
314
+
315
+ Parameters:
316
+ - `values` - - An object containing the properties and their new values. Only provided properties are updated.
317
+ `bias`: Sets the bias term for all nodes.
318
+ `squash`: Sets the activation function (squashing function) for all nodes.
319
+ `type`: Sets the node type (e.g., 'input', 'hidden', 'output') for all nodes.
320
+
321
+ #### toJSON
322
+
323
+ `() => { size: number; nodeIndices: (number | undefined)[]; connections: { in: number; out: number; self: number; }; }`
324
+
325
+ Serializes the group into a JSON-compatible format, avoiding circular references.
326
+ Only includes node indices and connection counts.
327
+
328
+ Returns: A JSON-compatible representation of the group.
329
+
330
+ ## architecture/layer.ts
331
+
332
+ ### layer
333
+
334
+ Represents a functional layer within a neural network architecture.
335
+
336
+ Layers act as organizational units for nodes, facilitating the creation of
337
+ complex network structures like Dense, LSTM, GRU, or Memory layers.
338
+ They manage the collective behavior of their nodes, including activation,
339
+ propagation, and connection to other network components.
340
+
341
+ ### Layer
342
+
343
+ Represents a functional layer within a neural network architecture.
344
+
345
+ Layers act as organizational units for nodes, facilitating the creation of
346
+ complex network structures like Dense, LSTM, GRU, or Memory layers.
347
+ They manage the collective behavior of their nodes, including activation,
348
+ propagation, and connection to other network components.
349
+
350
+ ### default
351
+
352
+ #### activate
353
+
354
+ `(value: number[] | undefined, training: boolean) => number[]`
355
+
356
+ Activates all nodes within the layer, computing their output values.
357
+
358
+ If an input `value` array is provided, it's used as the initial activation
359
+ for the corresponding nodes in the layer. Otherwise, nodes compute their
360
+ activation based on their incoming connections.
361
+
362
+ During training, layer-level dropout is applied, masking all nodes in the layer together.
363
+ During inference, all masks are set to 1.
364
+
365
+ Parameters:
366
+ - `value` - - An optional array of activation values to set for the layer's nodes. The length must match the number of nodes.
367
+ - `training` - - A boolean indicating whether the layer is in training mode. Defaults to false.
368
+
369
+ Returns: An array containing the activation value of each node in the layer after activation.
370
+
371
+ #### attention
372
+
373
+ `(size: number, heads: number) => import("D:/code-practice/NeatapticTS/src/architecture/layer").default`
374
+
375
+ Creates a multi-head self-attention layer (stub implementation).
376
+
377
+ Parameters:
378
+ - `size` - - Number of output nodes.
379
+ - `heads` - - Number of attention heads (default 1).
380
+
381
+ Returns: A new Layer instance representing an attention layer.
382
+
383
+ #### batchNorm
384
+
385
+ `(size: number) => import("D:/code-practice/NeatapticTS/src/architecture/layer").default`
386
+
387
+ Creates a batch normalization layer.
388
+ Applies batch normalization to the activations of the nodes in this layer during activation.
389
+
390
+ Parameters:
391
+ - `size` - - The number of nodes in this layer.
392
+
393
+ Returns: A new Layer instance configured as a batch normalization layer.
394
+
395
+ #### clear
396
+
397
+ `() => void`
398
+
399
+ Resets the activation state of all nodes within the layer.
400
+ This is typically done before processing a new input sequence or sample.
401
+
402
+ #### connect
403
+
404
+ `(target: import("D:/code-practice/NeatapticTS/src/architecture/node").default | import("D:/code-practice/NeatapticTS/src/architecture/layer").default | import("D:/code-practice/NeatapticTS/src/architecture/group").default, method: any, weight: number | undefined) => any[]`
405
+
406
+ Connects this layer's output to a target component (Layer, Group, or Node).
407
+
408
+ This method delegates the connection logic primarily to the layer's `output` group
409
+ or the target layer's `input` method. It establishes the forward connections
410
+ necessary for signal propagation.
411
+
412
+ Parameters:
413
+ - `target` - - The destination Layer, Group, or Node to connect to.
414
+ - `method` - - The connection method (e.g., `ALL_TO_ALL`, `ONE_TO_ONE`) defining the connection pattern. See `methods.groupConnection`.
415
+ - `weight` - - An optional fixed weight to assign to all created connections.
416
+
417
+ Returns: An array containing the newly created connection objects.
418
+
419
+ #### connections
420
+
421
+ Stores connection information related to this layer. This is often managed
422
+ by the network or higher-level structures rather than directly by the layer itself.
423
+ `in`: Incoming connections to the layer's nodes.
424
+ `out`: Outgoing connections from the layer's nodes.
425
+ `self`: Self-connections within the layer's nodes.
426
+
427
+ #### conv1d
428
+
429
+ `(size: number, kernelSize: number, stride: number, padding: number) => import("D:/code-practice/NeatapticTS/src/architecture/layer").default`
430
+
431
+ Creates a 1D convolutional layer (stub implementation).
432
+
433
+ Parameters:
434
+ - `size` - - Number of output nodes (filters).
435
+ - `kernelSize` - - Size of the convolution kernel.
436
+ - `stride` - - Stride of the convolution (default 1).
437
+ - `padding` - - Padding (default 0).
438
+
439
+ Returns: A new Layer instance representing a 1D convolutional layer.
440
+
441
+ #### dense
442
+
443
+ `(size: number) => import("D:/code-practice/NeatapticTS/src/architecture/layer").default`
444
+
445
+ Creates a standard fully connected (dense) layer.
446
+
447
+ All nodes in the source layer/group will connect to all nodes in this layer
448
+ when using the default `ALL_TO_ALL` connection method via `layer.input()`.
449
+
450
+ Parameters:
451
+ - `size` - - The number of nodes (neurons) in this layer.
452
+
453
+ Returns: A new Layer instance configured as a dense layer.
454
+
455
+ #### disconnect
456
+
457
+ `(target: import("D:/code-practice/NeatapticTS/src/architecture/node").default | import("D:/code-practice/NeatapticTS/src/architecture/group").default, twosided: boolean | undefined) => void`
458
+
459
+ Removes connections between this layer's nodes and a target Group or Node.
460
+
461
+ Parameters:
462
+ - `target` - - The Group or Node to disconnect from.
463
+ - `twosided` - - If true, removes connections in both directions (from this layer to target, and from target to this layer). Defaults to false.
464
+
465
+ #### dropout
466
+
467
+ Dropout rate for this layer (0 to 1). If > 0, all nodes in the layer are masked together during training.
468
+ Layer-level dropout takes precedence over node-level dropout for nodes in this layer.
469
+
470
+ #### gate
471
+
472
+ `(connections: any[], method: any) => void`
473
+
474
+ Applies gating to a set of connections originating from this layer's output group.
475
+
476
+ Gating allows the activity of nodes in this layer (specifically, the output group)
477
+ to modulate the flow of information through the specified `connections`.
478
+
479
+ Parameters:
480
+ - `connections` - - An array of connection objects to be gated.
481
+ - `method` - - The gating method (e.g., `INPUT`, `OUTPUT`, `SELF`) specifying how the gate influences the connection. See `methods.gating`.
482
+
483
+ #### gru
484
+
485
+ `(size: number) => import("D:/code-practice/NeatapticTS/src/architecture/layer").default`
486
+
487
+ Creates a Gated Recurrent Unit (GRU) layer.
488
+
489
+ GRUs are another type of recurrent neural network cell, often considered
490
+ simpler than LSTMs but achieving similar performance on many tasks.
491
+ They use an update gate and a reset gate to manage information flow.
492
+
493
+ Parameters:
494
+ - `size` - - The number of GRU units (and nodes in each gate/cell group).
495
+
496
+ Returns: A new Layer instance configured as a GRU layer.
497
+
498
+ #### input
499
+
500
+ `(from: import("D:/code-practice/NeatapticTS/src/architecture/layer").default | import("D:/code-practice/NeatapticTS/src/architecture/group").default, method: any, weight: number | undefined) => any[]`
501
+
502
+ Handles the connection logic when this layer is the *target* of a connection.
503
+
504
+ It connects the output of the `from` layer or group to this layer's primary
505
+ input mechanism (which is often the `output` group itself, but depends on the layer type).
506
+ This method is usually called by the `connect` method of the source layer/group.
507
+
508
+ Parameters:
509
+ - `from` - - The source Layer or Group connecting *to* this layer.
510
+ - `method` - - The connection method (e.g., `ALL_TO_ALL`). Defaults to `ALL_TO_ALL`.
511
+ - `weight` - - An optional fixed weight for the connections.
512
+
513
+ Returns: An array containing the newly created connection objects.
514
+
515
+ #### isGroup
516
+
517
+ `(obj: any) => boolean`
518
+
519
+ Type guard to check if an object is likely a `Group`.
520
+
521
+ This is a duck-typing check based on the presence of expected properties
522
+ (`set` method and `nodes` array). Used internally where `layer.nodes`
523
+ might contain `Group` instances (e.g., in `Memory` layers).
524
+
525
+ Parameters:
526
+ - `obj` - - The object to inspect.
527
+
528
+ Returns: `true` if the object has `set` and `nodes` properties matching a Group, `false` otherwise.
529
+
530
+ #### layerNorm
531
+
532
+ `(size: number) => import("D:/code-practice/NeatapticTS/src/architecture/layer").default`
533
+
534
+ Creates a layer normalization layer.
535
+ Applies layer normalization to the activations of the nodes in this layer during activation.
536
+
537
+ Parameters:
538
+ - `size` - - The number of nodes in this layer.
539
+
540
+ Returns: A new Layer instance configured as a layer normalization layer.
541
+
542
+ #### lstm
543
+
544
+ `(size: number) => import("D:/code-practice/NeatapticTS/src/architecture/layer").default`
545
+
546
+ Creates a Long Short-Term Memory (LSTM) layer.
547
+
548
+ LSTMs are a type of recurrent neural network (RNN) cell capable of learning
549
+ long-range dependencies. This implementation uses standard LSTM architecture
550
+ with input, forget, and output gates, and a memory cell.
551
+
552
+ Parameters:
553
+ - `size` - - The number of LSTM units (and nodes in each gate/cell group).
554
+
555
+ Returns: A new Layer instance configured as an LSTM layer.
556
+
557
+ #### memory
558
+
559
+ `(size: number, memory: number) => import("D:/code-practice/NeatapticTS/src/architecture/layer").default`
560
+
561
+ Creates a Memory layer, designed to hold state over a fixed number of time steps.
562
+
563
+ This layer consists of multiple groups (memory blocks), each holding the state
564
+ from a previous time step. The input connects to the most recent block, and
565
+ information propagates backward through the blocks. The layer's output
566
+ concatenates the states of all memory blocks.
567
+
568
+ Parameters:
569
+ - `size` - - The number of nodes in each memory block (must match the input size).
570
+ - `memory` - - The number of time steps to remember (number of memory blocks).
571
+
572
+ Returns: A new Layer instance configured as a Memory layer.
573
+
574
+ #### nodes
575
+
576
+ An array containing all the nodes (neurons or groups) that constitute this layer.
577
+ The order of nodes might be relevant depending on the layer type and its connections.
578
+
579
+ #### output
580
+
581
+ Represents the primary output group of nodes for this layer.
582
+ This group is typically used when connecting this layer *to* another layer or group.
583
+ It might be null if the layer is not yet fully constructed or is an input layer.
584
+
585
+ #### propagate
586
+
587
+ `(rate: number, momentum: number, target: number[] | undefined) => void`
588
+
589
+ Propagates the error backward through all nodes in the layer.
590
+
591
+ This is a core step in the backpropagation algorithm used for training.
592
+ If a `target` array is provided (typically for the output layer), it's used
593
+ to calculate the initial error for each node. Otherwise, nodes calculate
594
+ their error based on the error propagated from subsequent layers.
595
+
596
+ Parameters:
597
+ - `rate` - - The learning rate, controlling the step size of weight adjustments.
598
+ - `momentum` - - The momentum factor, used to smooth weight updates and escape local minima.
599
+ - `target` - - An optional array of target values (expected outputs) for the layer's nodes. The length must match the number of nodes.
600
+
601
+ #### set
602
+
603
+ `(values: { bias?: number | undefined; squash?: any; type?: string | undefined; }) => void`
604
+
605
+ Configures properties for all nodes within the layer.
606
+
607
+ Allows batch setting of common node properties like bias, activation function (`squash`),
608
+ or node type. If a node within the `nodes` array is actually a `Group` (e.g., in memory layers),
609
+ the configuration is applied recursively to the nodes within that group.
610
+
611
+ Parameters:
612
+ - `values` - - An object containing the properties and their values to set.
613
+ Example: `{ bias: 0.5, squash: methods.Activation.ReLU }`
614
+
615
+ ## architecture/network.ts
616
+
617
+ ### network
618
+
619
+ ### default
620
+
621
+ #### _applyGradientClipping
622
+
623
+ `(cfg: { mode: "norm" | "percentile" | "layerwiseNorm" | "layerwisePercentile"; maxNorm?: number | undefined; percentile?: number | undefined; }) => void`
624
+
625
+ Applies gradient clipping to the accumulated gradients prior to a weight update.
626
+ Supports global or layer-wise clipping, either by rescaling the gradients to a
627
+ maximum L2 norm or by limiting components above a percentile threshold, as
628
+ selected by the configuration's `mode` property.
629
+
630
+ Parameters:
631
+ - `cfg` - - The clipping configuration.
632
+ `mode`: The clipping strategy: 'norm', 'percentile', 'layerwiseNorm', or 'layerwisePercentile'.
633
+ `maxNorm`: The maximum allowed gradient L2 norm, used by the norm-based modes.
634
+ `percentile`: The percentile threshold, used by the percentile-based modes.
635
+
636
+ Returns: Nothing (`void`); clipping is applied to the network's stored
637
+ gradient state.
638
+
639
+ #### activate
640
+
641
+ `(input: number[], training: boolean, maxActivationDepth: number) => number[]`
642
+
643
+ Activates the network using the given input array.
644
+ Performs a forward pass through the network, calculating the activation of each node.
645
+
646
+ Parameters:
647
+ - `` - - An array of numerical values corresponding to the network's input nodes.
648
+ - `` - - Flag indicating if the activation is part of a training process.
649
+ - `` - - Maximum allowed activation depth to prevent infinite loops/cycles.
650
+
651
+ Returns: An array of numerical values representing the activations of the network's output nodes.
652
+
653
+ #### activateBatch
654
+
655
+ `(inputs: number[][], training: boolean) => number[][]`
656
+
657
+ Activate the network over a batch of input vectors (micro-batching).
658
+
659
+ Currently iterates sample-by-sample while reusing the network's internal
660
+ fast-path allocations. Outputs are cloned number[] arrays for API
661
+ compatibility. Future optimizations can vectorize this path.
662
+
663
+ Parameters:
664
+ - `inputs` - Array of input vectors, each length must equal this.input
665
+ - `training` - Whether to run with training-time stochastic features
666
+
667
+ Returns: Array of output vectors, each length equals this.output
668
+
669
+ #### activateRaw
670
+
671
+ `(input: number[], training: boolean, maxActivationDepth: number) => any`
672
+
673
+ Raw activation that can return a typed array when pooling is enabled (zero-copy).
674
+ If reuseActivationArrays=false falls back to standard activate().
675
+
676
+ #### adjustRateForAccumulation
677
+
678
+ `(rate: number, accumulationSteps: number, reduction: "average" | "sum") => number`
679
+
680
+ Utility: adjust rate for accumulation mode (use result when switching to 'sum' to mimic 'average').
681
+
682
+ #### clear
683
+
684
+ `() => void`
685
+
686
+ Clears the internal state of all nodes in the network.
687
+ Resets node activation, state, eligibility traces, and extended traces to their initial values (usually 0).
688
+ This is typically done before processing a new input sequence in recurrent networks or between training epochs if desired.
689
+
690
+ #### clone
691
+
692
+ `() => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
693
+
694
+ Creates a deep copy of the network.
695
+
696
+ Returns: A new Network instance that is a clone of the current network.
697
+
698
+ #### connect
699
+
700
+ `(from: import("D:/code-practice/NeatapticTS/src/architecture/node").default, to: import("D:/code-practice/NeatapticTS/src/architecture/node").default, weight: number | undefined) => import("D:/code-practice/NeatapticTS/src/architecture/connection").default[]`
701
+
702
+ Creates a connection between two nodes in the network.
703
+ Handles both regular connections and self-connections.
704
+ Adds the new connection object(s) to the appropriate network list (`connections` or `selfconns`).
705
+
706
+ Parameters:
707
+ - `from` - - The source node of the connection.
708
+ - `to` - - The target node of the connection.
709
+ - `weight` - - Optional weight for the connection. If not provided, a random weight is usually assigned by the underlying `Node.connect` method.
710
+
711
+ Returns: An array containing the newly created connection object(s). Typically contains one connection, but might be empty or contain more in specialized node types.
712
+
713
+ #### createMLP
714
+
715
+ `(inputCount: number, hiddenCounts: number[], outputCount: number) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
716
+
717
+ Creates a fully connected, strictly layered MLP network.
718
+
719
+ Parameters:
720
+ - `inputCount` - - Number of input nodes
721
+ - `hiddenCounts` - - Array of hidden layer sizes (e.g. [2,3] for two hidden layers)
722
+ - `outputCount` - - Number of output nodes
723
+
724
+ Returns: A new, fully connected, layered MLP
725
+
726
+ #### crossOver
727
+
728
+ `(network1: import("D:/code-practice/NeatapticTS/src/architecture/network").default, network2: import("D:/code-practice/NeatapticTS/src/architecture/network").default, equal: boolean) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
729
+
730
+ Creates a new offspring network by performing crossover between two parent networks.
731
+ This method implements the crossover mechanism inspired by the NEAT algorithm and described
732
+ in the Instinct paper, combining genes (nodes and connections) from both parents.
733
+ Fitness scores can influence the inheritance process. Matching genes are inherited randomly,
734
+ while disjoint/excess genes are typically inherited from the fitter parent (or randomly if fitness is equal or `equal` flag is set).
735
+
736
+ Parameters:
737
+ - `network1` - - The first parent network.
738
+ - `network2` - - The second parent network.
739
+ - `equal` - - If true, disjoint and excess genes are inherited randomly regardless of fitness.
740
+ If false (default), they are inherited from the fitter parent.
741
+
742
+ Returns: A new Network instance representing the offspring.
743
+
744
+ #### deserialize
745
+
746
+ `(data: any[], inputSize: number | undefined, outputSize: number | undefined) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
747
+
748
+ Creates a Network instance from serialized data produced by `serialize()`.
749
+ Reconstructs the network structure and state based on the provided arrays.
750
+
751
+ Parameters:
752
+ - `data` - - The serialized network data array, typically obtained from `network.serialize()`.
753
+ Expected format: `[activations, states, squashNames, connectionData, inputSize, outputSize]`.
754
+ - `inputSize` - - Optional input size override.
755
+ - `outputSize` - - Optional output size override.
756
+
757
+ Returns: A new Network instance reconstructed from the serialized data.
758
+
759
+ #### disconnect
760
+
761
+ `(from: import("D:/code-practice/NeatapticTS/src/architecture/node").default, to: import("D:/code-practice/NeatapticTS/src/architecture/node").default) => void`
762
+
763
+ Disconnects two nodes, removing the connection between them.
764
+ Handles both regular connections and self-connections.
765
+ If the connection being removed was gated, it is also ungated.
766
+
767
+ Parameters:
768
+ - `from` - - The source node of the connection to remove.
769
+ - `to` - - The target node of the connection to remove.
770
+
771
+ #### enableWeightNoise
772
+
773
+ `(stdDev: number | { perHiddenLayer: number[]; }) => void`
774
+
775
+ Enable weight noise. Provide a single std dev number or { perHiddenLayer: number[] }.
776
+
777
+ #### fromJSON
778
+
779
+ `(json: any) => import("D:/code-practice/NeatapticTS/src/architecture/network").default`
780
+
781
+ Reconstructs a network from a JSON object (latest standard).
782
+ Handles formatVersion, robust error handling, and index-based references.
783
+
784
+ Parameters:
785
+ - `json` - - The JSON object representing the network.
786
+
787
+ Returns: The reconstructed network.
788
+
789
+ #### gate
790
+
791
+ `(node: import("D:/code-practice/NeatapticTS/src/architecture/node").default, connection: import("D:/code-practice/NeatapticTS/src/architecture/connection").default) => void`
792
+
793
+ Gates a connection with a specified node.
794
+ The activation of the `node` (gater) will modulate the weight of the `connection`.
795
+ Adds the connection to the network's `gates` list.
796
+
797
+ Parameters:
798
+ - `node` - - The node that will act as the gater. Must be part of this network.
799
+ - `connection` - - The connection to be gated.
800
+
801
+ #### getLastGradClipGroupCount
802
+
803
+ `() => number`
804
+
805
+ Returns last gradient clipping group count (0 if no clipping yet).
806
+
807
+ #### getLossScale
808
+
809
+ `() => number`
810
+
811
+ Returns current mixed precision loss scale (1 if disabled).
812
+
813
+ #### getRawGradientNorm
814
+
815
+ `() => number`
816
+
817
+ Returns last recorded raw (pre-update) gradient L2 norm.
818
+
819
+ #### getTrainingStats
820
+
821
+ `() => { gradNorm: number; gradNormRaw: number; lossScale: number; optimizerStep: number; mp: { good: number; bad: number; overflowCount: number; scaleUps: number; scaleDowns: number; lastOverflowStep: number; }; }`
822
+
823
+ Consolidated training stats snapshot.
824
+
825
+ #### mutate
826
+
827
+ `(method: any) => void`
828
+
829
+ Mutates the network's structure or parameters according to the specified method.
830
+ This is a core operation for neuro-evolutionary algorithms (like NEAT).
831
+ The method argument should be one of the mutation types defined in `methods.mutation`.
832
+
833
+ Parameters:
834
+ - `method` - - The mutation method to apply (e.g., `mutation.ADD_NODE`, `mutation.MOD_WEIGHT`).
835
+ Some methods might have associated parameters (e.g., `MOD_WEIGHT` uses `min`, `max`).
836
+
837
+ #### noTraceActivate
838
+
839
+ `(input: number[]) => number[]`
840
+
841
+ Activates the network without calculating eligibility traces.
842
+ This is a performance optimization for scenarios where backpropagation is not needed,
843
+ such as during testing, evaluation, or deployment (inference).
844
+
845
+ Parameters:
846
+ - `input` - - An array of numerical values corresponding to the network's input nodes.
847
+ The length must match the network's `input` size.
848
+
849
+ Returns: An array of numerical values representing the activations of the network's output nodes.
850
+
851
+ #### propagate
852
+
853
+ `(rate: number, momentum: number, update: boolean, target: number[], regularization: number, costDerivative: ((target: number, output: number) => number) | undefined) => void`
854
+
855
+ Propagates the error backward through the network (backpropagation).
856
+ Calculates the error gradient for each node and connection.
857
+ If `update` is true, it adjusts the weights and biases based on the calculated gradients,
858
+ learning rate, momentum, and optional L2 regularization.
859
+
860
+ The process starts from the output nodes and moves backward layer by layer (or topologically for recurrent nets).
861
+
862
+ Parameters:
863
+ - `rate` - - The learning rate (controls the step size of weight adjustments).
864
+ - `momentum` - - The momentum factor (helps overcome local minima and speeds up convergence). Typically between 0 and 1.
865
+ - `update` - - If true, apply the calculated weight and bias updates. If false, only calculate gradients (e.g., for batch accumulation).
866
+ - `target` - - An array of target values corresponding to the network's output nodes.
867
+ The length must match the network's `output` size.
868
+ - `regularization` - - The L2 regularization factor (lambda). Helps prevent overfitting by penalizing large weights.
869
+ - `costDerivative` - - Optional derivative of the cost function for output nodes.
870
+
871
+ #### pruneToSparsity
872
+
873
+ `(targetSparsity: number, method: "magnitude" | "snip") => void`
874
+
875
+ Immediately prune connections to reach (or approach) a target sparsity fraction.
876
+ Used by evolutionary pruning (generation-based) independent of training iteration schedule.
877
+
878
+ Parameters:
879
+ - `targetSparsity` - fraction in (0,1). 0.8 means keep 20% of original (if first call sets baseline)
880
+ - `method` - 'magnitude' | 'snip'
881
+
882
+ #### rebuildConnections
883
+
884
+ `(net: import("D:/code-practice/NeatapticTS/src/architecture/network").default) => void`
885
+
886
+ Rebuilds the network's connections array from all per-node connections.
887
+ This ensures that the network.connections array is consistent with the actual
888
+ outgoing connections of all nodes. Useful after manual wiring or node manipulation.
889
+
890
+ Parameters:
891
+ - `net` - - The network instance to rebuild connections for.
892
+
893
+ Example usage:
894
+ `Network.rebuildConnections(net);`
895
+
896
+ #### remove
897
+
898
+ `(node: import("D:/code-practice/NeatapticTS/src/architecture/node").default) => void`
899
+
900
+ Removes a node from the network.
901
+ This involves:
902
+ 1. Disconnecting all incoming and outgoing connections associated with the node.
903
+ 2. Removing any self-connections.
904
+ 3. Removing the node from the `nodes` array.
905
+ 4. Attempting to reconnect the node's direct predecessors to its direct successors
906
+ to maintain network flow, if possible and configured.
907
+ 5. Handling gates involving the removed node (ungating connections gated *by* this node,
908
+ and potentially re-gating connections that were gated *by other nodes* onto the removed node's connections).
909
+
910
+ Parameters:
911
+ - `node` - - The node instance to remove. Must exist within the network's `nodes` list.
912
+
913
+ #### resetDropoutMasks
914
+
915
+ `() => void`
916
+
917
+ Resets all masks in the network to 1 (no dropout). Applies to both node-level and layer-level dropout.
918
+ Should be called after training to ensure inference is unaffected by previous dropout.
919
+
920
+ #### serialize
921
+
922
+ `() => any[]`
923
+
924
+ Lightweight tuple serializer delegating to network.serialize.ts
925
+
926
+ #### set
927
+
928
+ `(values: { bias?: number | undefined; squash?: any; }) => void`
929
+
930
+ Sets specified properties (e.g., bias, squash function) for all nodes in the network.
931
+ Useful for initializing or resetting node properties uniformly.
932
+
933
+ Parameters:
934
+ - `values` - - An object containing the properties and values to set.
935
+
936
+ #### setStochasticDepth
937
+
938
+ `(survival: number[]) => void`
939
+
940
+ Configure stochastic depth with survival probabilities per hidden layer (length must match hidden layer count when using layered network).
941
+
942
+ #### test
943
+
944
+ `(set: { input: number[]; output: number[]; }[], cost: any) => { error: number; time: number; }`
945
+
946
+ Tests the network's performance on a given dataset.
947
+ Calculates the average error over the dataset using a specified cost function.
948
+ Uses `noTraceActivate` for efficiency as gradients are not needed.
949
+ Handles dropout scaling if dropout was used during training.
950
+
951
+ Parameters:
952
+ - `set` - - The test dataset, an array of objects with `input` and `output` arrays.
953
+ - `cost` - - The cost function to evaluate the error. Defaults to Mean Squared Error.
954
+
955
+ Returns: An object containing the calculated average error over the dataset and the time taken for the test in milliseconds.
956
+
957
+ #### toJSON
958
+
959
+ `() => object`
960
+
961
+ Converts the network into a JSON object representation (latest standard).
962
+ Includes formatVersion, and only serializes properties needed for full reconstruction.
963
+ All references are by index. Excludes runtime-only properties (activation, state, traces).
964
+
965
+ Returns: A JSON-compatible object representing the network.
966
+
967
+ #### toONNX
968
+
969
+ `() => import("D:/code-practice/NeatapticTS/src/architecture/network/network.onnx").OnnxModel`
970
+
971
+ Exports the network to ONNX format (JSON object, minimal MLP support).
972
+ Only standard feedforward architectures and standard activations are supported.
973
+ Gating, custom activations, and evolutionary features are ignored or replaced with Identity.
974
+
975
+ Returns: ONNX model as a JSON object.
976
+
977
+ #### ungate
978
+
979
+ `(connection: import("D:/code-practice/NeatapticTS/src/architecture/connection").default) => void`
980
+
981
+ Removes the gate from a specified connection.
982
+ The connection will no longer be modulated by its gater node.
983
+ Removes the connection from the network's `gates` list.
984
+
985
+ Parameters:
986
+ - `connection` - - The connection object to ungate.
987
+
988
+ ## architecture/node.ts
989
+
990
+ ### node
991
+
992
+ Represents a node (neuron) in a neural network graph.
993
+
994
+ Nodes are the fundamental processing units. They receive inputs, apply an activation function,
995
+ and produce an output. Nodes can be of type 'input', 'hidden', or 'output'. Hidden and output
996
+ nodes have biases and activation functions, which can be mutated during neuro-evolution.
997
+ This class also implements mechanisms for backpropagation, including support for momentum (NAG),
998
+ L2 regularization, dropout, and eligibility traces for recurrent connections.
999
+
1000
+ ### Node
1001
+
1002
+ Represents a node (neuron) in a neural network graph.
1003
+
1004
+ Nodes are the fundamental processing units. They receive inputs, apply an activation function,
1005
+ and produce an output. Nodes can be of type 'input', 'hidden', or 'output'. Hidden and output
1006
+ nodes have biases and activation functions, which can be mutated during neuro-evolution.
1007
+ This class also implements mechanisms for backpropagation, including support for momentum (NAG),
1008
+ L2 regularization, dropout, and eligibility traces for recurrent connections.
1009
+
1010
+ ### default
1011
+
1012
+ #### _activateCore
1013
+
1014
+ `(withTrace: boolean, input: number | undefined) => number`
1015
+
1016
+ Internal shared implementation for activate/noTraceActivate.
1017
+
1018
+ Parameters:
1019
+ - `withTrace` - Whether to update eligibility traces.
1020
+ - `input` - Optional externally supplied activation (bypasses weighted sum if provided).
1021
+
1022
+ #### _globalNodeIndex
1023
+
1024
+ Global index counter for assigning unique indices to nodes.
1025
+
1026
+ #### _safeUpdateWeight
1027
+
1028
+ `(connection: import("D:/code-practice/NeatapticTS/src/architecture/connection").default, delta: number) => void`
1029
+
1030
+ Internal helper to safely update a connection weight with clipping and NaN checks.
1031
+
1032
+ #### activate
1033
+
1034
+ `(input: number | undefined) => number`
1035
+
1036
+ Activates the node, calculating its output value based on inputs and state.
1037
+ This method also calculates eligibility traces (`xtrace`) used for training recurrent connections.
1038
+
1039
+ The activation process involves:
1040
+ 1. Calculating the node's internal state (`this.state`) based on:
1041
+ - Incoming connections' weighted activations.
1042
+ - The recurrent self-connection's weighted state from the previous timestep (`this.old`).
1043
+ - The node's bias.
1044
+ 2. Applying the activation function (`this.squash`) to the state to get the activation (`this.activation`).
1045
+ 3. Applying the dropout mask (`this.mask`).
1046
+ 4. Calculating the derivative of the activation function.
1047
+ 5. Updating the gain of connections gated by this node.
1048
+ 6. Calculating and updating eligibility traces for incoming connections.
1049
+
1050
+ Parameters:
1051
+ - `input` - Optional input value. If provided, sets the node's activation directly (used for input nodes).
1052
+
1053
+ Returns: The calculated activation value of the node.
1054
+
1055
+ #### activation
1056
+
1057
+ The output value of the node after applying the activation function. This is the value transmitted to connected nodes.
1058
+
1059
+ #### applyBatchUpdates
1060
+
1061
+ `(momentum: number) => void`
1062
+
1063
+ Applies accumulated batch updates to incoming and self connections and this node's bias.
1064
+ Uses momentum in a Nesterov-compatible way: currentDelta = accumulated + momentum * previousDelta.
1065
+ Resets accumulators after applying. Safe to call on any node type.
1066
+
1067
+ Parameters:
1068
+ - `momentum` - Momentum factor (0 to disable)
1069
+
1070
+ #### applyBatchUpdatesWithOptimizer
1071
+
1072
+ `(opts: { type: "sgd" | "rmsprop" | "adagrad" | "adam" | "adamw" | "amsgrad" | "adamax" | "nadam" | "radam" | "lion" | "adabelief" | "lookahead"; momentum?: number | undefined; beta1?: number | undefined; beta2?: number | undefined; eps?: number | undefined; weightDecay?: number | undefined; lrScale?: number | undefined; t?: number | undefined; baseType?: any; la_k?: number | undefined; la_alpha?: number | undefined; }) => void`
1073
+
1074
+ Extended batch update supporting multiple optimizers.
1075
+
1076
+ Applies accumulated (batch) gradients stored in `totalDeltaWeight` / `totalDeltaBias` to the
1077
+ underlying weights and bias using the selected optimization algorithm. Supports both classic
1078
+ SGD (with Nesterov-style momentum via preceding propagate logic) and a collection of adaptive
1079
+ optimizers. After applying an update, gradient accumulators are reset to 0.
1080
+
1081
+ Supported optimizers (type):
1082
+ - 'sgd' : Standard gradient descent with optional momentum.
1083
+ - 'rmsprop' : Exponential moving average of squared gradients (cache) to normalize step.
1084
+ - 'adagrad' : Accumulate squared gradients; learning rate effectively decays per weight.
1085
+ - 'adam' : Bias‑corrected first (m) & second (v) moment estimates.
1086
+ - 'adamw' : Adam with decoupled weight decay (applied after adaptive step).
1087
+ - 'amsgrad' : Adam variant maintaining a maximum of past v (vhat) to enforce non‑increasing step size.
1088
+ - 'adamax' : Adam variant using the infinity norm (u) instead of second moment.
1089
+ - 'nadam' : Adam + Nesterov momentum style update (lookahead on first moment).
1090
+ - 'radam' : Rectified Adam – warms up variance by adaptively rectifying the denominator when the sample size is small.
1091
+ - 'lion' : Uses sign of combination of two momentum buffers (beta1 & beta2) for update direction only.
1092
+ - 'adabelief': Adam-like but second moment on (g - m) (gradient surprise) for variance reduction.
1093
+ - 'lookahead': Wrapper; performs k fast optimizer steps then interpolates (alpha) towards a slow (shadow) weight.
1094
+
1095
+ Options:
1096
+ - momentum : (SGD) momentum factor (Nesterov handled in propagate when update=true).
1097
+ - beta1/beta2 : Exponential decay rates for first/second moments (Adam family, Lion, AdaBelief, etc.).
1098
+ - eps : Numerical stability epsilon added to denominator terms.
1099
+ - weightDecay : Decoupled weight decay (AdamW) or additionally applied after main step when adamw selected.
1100
+ - lrScale : Learning rate scalar already scheduled externally (passed as currentRate).
1101
+ - t : Global step (1-indexed) for bias correction / rectification.
1102
+ - baseType : Underlying optimizer for lookahead (not itself lookahead).
1103
+ - la_k : Lookahead synchronization interval (number of fast steps).
1104
+ - la_alpha : Interpolation factor towards slow (shadow) weights/bias at sync points.
1105
+
1106
+ Internal per-connection temp fields (created lazily):
1107
+ - opt_m / opt_v / opt_vhat / opt_u : Moment / variance / max variance / infinity norm caches.
1108
+ - opt_cache : Single accumulator (RMSProp / AdaGrad).
1109
+ - previousDeltaWeight : For classic SGD momentum.
1110
+ - _la_shadowWeight / _la_shadowBias : Lookahead shadow copies.
1111
+
1112
+ Safety: We clip extreme weight / bias magnitudes and guard against NaN/Infinity.
1113
+
1114
+ Parameters:
1115
+ - `opts` - Optimizer configuration (see above).
1116
+
1117
+ #### bias
1118
+
1119
+ The bias value of the node. Added to the weighted sum of inputs before activation.
1120
+ Input nodes typically have a bias of 0.
1121
+
1122
+ #### clear
1123
+
1124
+ `() => void`
1125
+
1126
+ Clears the node's dynamic state information.
1127
+ Resets activation, state, previous state, error signals, and eligibility traces.
1128
+ Useful for starting a new activation sequence (e.g., for a new input pattern).
1129
+
1130
+ #### connect
1131
+
1132
+ `(target: Node | { nodes: Node[]; }, weight: number | undefined) => Connection[]`
1133
+
1134
+ Creates a connection from this node to a target node or all nodes in a group.
1135
+
1136
+ Parameters:
1137
+ - `target` - The target Node or a group object containing a `nodes` array.
1138
+ - `weight` - The weight for the new connection(s). If undefined, the Connection constructor assigns its default weight (currently 0; a randomized default may be preferable).
1139
+
1140
+ Returns: An array containing the newly created Connection object(s).
1141
+
1142
+ #### connections
1143
+
1144
+ Stores incoming, outgoing, gated, and self-connections for this node.
1145
+
1146
+ #### derivative
1147
+
1148
+ The derivative of the activation function evaluated at the node's current state. Used in backpropagation.
1149
+
1150
+ #### disconnect
1151
+
1152
+ `(target: Node, twosided: boolean) => void`
1153
+
1154
+ Removes the connection from this node to the target node.
1155
+
1156
+ Parameters:
1157
+ - `target` - The target node to disconnect from.
1158
+ - `twosided` - If true, also removes the connection from the target node back to this node (if it exists). Defaults to false.
1159
+
1160
+ #### error
1161
+
1162
+ Stores error values calculated during backpropagation.
1163
+
1164
+ #### fromJSON
1165
+
1166
+ `(json: { bias: number; type: string; squash: string; mask: number; }) => Node`
1167
+
1168
+ Creates a Node instance from a JSON object.
1169
+
1170
+ Parameters:
1171
+ - `json` - The JSON object containing node configuration.
1172
+
1173
+ Returns: A new Node instance configured according to the JSON object.
1174
+
1175
+ #### gate
1176
+
1177
+ `(connections: Connection | Connection[]) => void`
1178
+
1179
+ Makes this node gate the provided connection(s).
1180
+ The connection's gain will be controlled by this node's activation value.
1181
+
1182
+ Parameters:
1183
+ - `connections` - A single Connection object or an array of Connection objects to be gated.
1184
+
1185
+ #### gates
1186
+
1187
+ **Deprecated:** Use connections.gated; retained for legacy tests
1188
+
1189
+ #### geneId
1190
+
1191
+ Stable per-node gene identifier for NEAT innovation reuse
1192
+
1193
+ #### index
1194
+
1195
+ Optional index, potentially used to identify the node's position within a layer or network structure. Not used internally by the Node class itself.
1196
+
1197
+ #### isActivating
1198
+
1199
+ Internal flag to detect cycles during activation
1200
+
1201
+ #### isConnectedTo
1202
+
1203
+ `(target: Node) => boolean`
1204
+
1205
+ Checks if this node is connected to another node.
1206
+
1207
+ Parameters:
1208
+ - `target` - The target node to check the connection with.
1209
+
1210
+ Returns: True if connected, otherwise false.
1211
+
1212
+ #### isProjectedBy
1213
+
1214
+ `(node: Node) => boolean`
1215
+
1216
+ Checks if the given node has a direct outgoing connection to this node.
1217
+ Considers both regular incoming connections and the self-connection.
1218
+
1219
+ Parameters:
1220
+ - `node` - The potential source node.
1221
+
1222
+ Returns: True if the given node projects to this node, false otherwise.
1223
+
1224
+ #### isProjectingTo
1225
+
1226
+ `(node: Node) => boolean`
1227
+
1228
+ Checks if this node has a direct outgoing connection to the given node.
1229
+ Considers both regular outgoing connections and the self-connection.
1230
+
1231
+ Parameters:
1232
+ - `node` - The potential target node.
1233
+
1234
+ Returns: True if this node projects to the target node, false otherwise.
1235
+
1236
+ #### mask
1237
+
1238
+ A mask factor (typically 0 or 1) used for implementing dropout. If 0, the node's output is effectively silenced.
1239
+
1240
+ #### mutate
1241
+
1242
+ `(method: any) => void`
1243
+
1244
+ Applies a mutation method to the node. Used in neuro-evolution.
1245
+
1246
+ This allows modifying the node's properties, such as its activation function or bias,
1247
+ based on predefined mutation methods.
1248
+
1249
+ Parameters:
1250
+ - `method` - A mutation method object, typically from `methods.mutation`. It should define the type of mutation and its parameters (e.g., allowed functions, modification range).
1251
+
1252
+ #### nodes
1253
+
1254
+ **Deprecated:** Placeholder kept for legacy structural algorithms. No longer populated.
1255
+
1256
+ #### noTraceActivate
1257
+
1258
+ `(input: number | undefined) => number`
1259
+
1260
+ Activates the node without calculating eligibility traces (`xtrace`).
1261
+ This is a performance optimization used during inference (when the network
1262
+ is just making predictions, not learning) as trace calculations are only needed for training.
1263
+
1264
+ Parameters:
1265
+ - `input` - Optional input value. If provided, sets the node's activation directly (used for input nodes).
1266
+
1267
+ Returns: The calculated activation value of the node.
1268
+
1269
+ #### old
1270
+
1271
+ The node's state from the previous activation cycle. Used for recurrent self-connections.
1272
+
1273
+ #### previousDeltaBias
1274
+
1275
+ The change in bias applied in the previous training iteration. Used for calculating momentum.
1276
+
1277
+ #### propagate
1278
+
1279
+ `(rate: number, momentum: number, update: boolean, regularization: number | { type: "L1" | "L2"; lambda: number; } | ((weight: number) => number), target: number | undefined) => void`
1280
+
1281
+ Back-propagates the error signal through the node and calculates weight/bias updates.
1282
+
1283
+ This method implements the backpropagation algorithm, including:
1284
+ 1. Calculating the node's error responsibility based on errors from subsequent nodes (`projected` error)
1285
+ and errors from connections it gates (`gated` error).
1286
+ 2. Calculating the gradient for each incoming connection's weight using eligibility traces (`xtrace`).
1287
+ 3. Calculating the change (delta) for weights and bias, incorporating:
1288
+ - Learning rate.
1289
+ - L1/L2/custom regularization.
1290
+ - Momentum (using Nesterov Accelerated Gradient - NAG).
1291
+ 4. Optionally applying the calculated updates immediately or accumulating them for batch training.
1292
+
1293
+ Parameters:
1294
+ - `rate` - The learning rate (controls the step size of updates).
1295
+ - `momentum` - The momentum factor (helps accelerate learning and overcome local minima). Uses NAG.
1296
+ - `update` - If true, apply the calculated weight/bias updates immediately. If false, accumulate them in `totalDelta*` properties for batch updates.
1297
+ - `regularization` - The regularization setting. Can be:
1298
+ - number (L2 lambda)
1299
+ - { type: 'L1'|'L2', lambda: number }
1300
+ - (weight: number) => number (custom function)
1301
+ - `target` - The target output value for this node. Only used if the node is of type 'output'.
1302
+
1303
+ #### setActivation
1304
+
1305
+ `(fn: (x: number, derivate?: boolean | undefined) => number) => void`
1306
+
1307
+ Sets a custom activation function for this node at runtime.
1308
+
1309
+ Parameters:
1310
+ - `fn` - The activation function (should handle derivative if needed).
1311
+
1312
+ #### squash
1313
+
1314
+ `(x: number, derivate: boolean | undefined) => number`
1315
+
1316
+ The activation function (squashing function) applied to the node's state.
1317
+ Maps the internal state to the node's output (activation).
1318
+
1319
+ Parameters:
1320
+ - `x` - The node's internal state (sum of weighted inputs + bias).
1321
+ - `derivate` - If true, returns the derivative of the function instead of the function value.
1322
+
1323
+ Returns: The activation value or its derivative.
1324
+
1325
+ #### state
1326
+
1327
+ The internal state of the node (sum of weighted inputs + bias) before the activation function is applied.
1328
+
1329
+ #### toJSON
1330
+
1331
+ `() => { index: number | undefined; bias: number; type: string; squash: string | null; mask: number; }`
1332
+
1333
+ Converts the node's essential properties to a JSON object for serialization.
1334
+ Does not include state, activation, error, or connection information, as these
1335
+ are typically transient or reconstructed separately.
1336
+
1337
+ Returns: A JSON representation of the node's configuration.
1338
+
1339
+ #### totalDeltaBias
1340
+
1341
+ Accumulates changes in bias over a mini-batch during batch training. Reset after each weight update.
1342
+
1343
+ #### type
1344
+
1345
+ The type of the node: 'input', 'hidden', or 'output'.
1346
+ Determines behavior (e.g., input nodes don't have biases modified typically, output nodes calculate error differently).
1347
+
1348
+ #### ungate
1349
+
1350
+ `(connections: Connection | Connection[]) => void`
1351
+
1352
+ Removes this node's gating control over the specified connection(s).
1353
+ Resets the connection's gain to 1 and removes it from the `connections.gated` list.
1354
+
1355
+ Parameters:
1356
+ - `connections` - A single Connection object or an array of Connection objects to ungate.
1357
+
1358
+ ## architecture/onnx.ts
1359
+
1360
+ ### exportToONNX
1361
+
1362
+ `(network: Network) => OnnxModel`
1363
+
1364
+ Export a minimal multilayer perceptron Network to a lightweight ONNX JSON object.
1365
+
1366
+ Steps:
1367
+ 1. Rebuild connection cache ensuring up-to-date adjacency.
1368
+ 2. Index nodes for error messaging.
1369
+ 3. Infer strict layer ordering (throws if structure unsupported).
1370
+ 4. Validate homogeneity & full connectivity layer-to-layer.
1371
+ 5. Build initializer tensors (weights + biases) and node list (Gemm + activation pairs).
1372
+
1373
+ Constraints: See module doc. Throws descriptive errors when assumptions violated.
1374
+
1375
+ ### importFromONNX
1376
+
1377
+ `(onnx: OnnxModel) => Network`
1378
+
1379
+ Import a model previously produced by {@link exportToONNX} into a fresh Network instance.
1380
+
1381
+ Steps:
1382
+ 1. Read input/output dimensions.
1383
+ 2. Derive hidden layer sizes from weight tensor shapes.
1384
+ 3. Create corresponding MLP with identical layer counts.
1385
+ 4. Assign weights & biases.
1386
+ 5. Map activation op_types back to internal activation functions.
1387
+ 6. Rebuild flat connection list.
1388
+
1389
+ Limitations: Only guaranteed for self-produced ONNX; inconsistent naming or ordering will break.
1390
+
1391
+ ### OnnxModel