numopt-js 0.3.1 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -14,6 +14,7 @@ A flexible numerical optimization library for JavaScript/TypeScript that works s
14
14
  - **Strong Wolfe Line Search**: Robust line search satisfying Strong Wolfe conditions (recommended for quasi-Newton methods)
15
15
  - **BFGS**: Quasi-Newton method that updates a dense inverse Hessian approximation (unconstrained smooth optimization)
16
16
  - **L-BFGS**: Memory-efficient quasi-Newton method (two-loop recursion) for larger parameter counts
17
+ - **CMA-ES**: Derivative-free black-box optimization using a covariance-adapting search distribution (unconstrained)
17
18
  - **Gauss-Newton Method**: Efficient method for nonlinear least squares problems
18
19
  - **Levenberg-Marquardt Algorithm**: Robust algorithm combining Gauss-Newton with damping
19
20
  - **Constrained Gauss-Newton**: Efficient constrained nonlinear least squares using effective Jacobian
@@ -39,6 +40,7 @@ npm install numopt-js
39
40
 
40
41
  - **Minimize a scalar cost** (smooth unconstrained optimization): use **Gradient Descent** (`cost: (p) => number`, `grad: (p) => Float64Array`). Start at [Gradient Descent](#gradient-descent).
41
42
  - **Minimize a scalar cost (faster than GD for many problems)**: use **BFGS** or **L-BFGS** (`cost: (p) => number`, `grad: (p) => Float64Array`). Start at [BFGS / L-BFGS](#bfgs--l-bfgs).
43
+ - **Minimize a scalar cost (black-box, no gradient)**: use **CMA-ES** (`cost: (p) => number`). Start at [CMA-ES](#cma-es-black-box-optimization).
42
44
  - **Fit a model with residuals** (nonlinear least squares): use **Levenberg–Marquardt** or **Gauss–Newton** (`residual: (p) => Float64Array`, optional `jacobian: (p) => Matrix`). Start at [Levenberg-Marquardt](#levenberg-marquardt-nonlinear-least-squares).
43
45
  - **Equality-constrained problems** \(c(p, x) = 0\): use **Adjoint** / **Constrained GN/LM** (`constraint: (p, x) => Float64Array`). Start at [Adjoint Method](#adjoint-method-constrained-optimization).
44
46
  - **Browser usage**: start at [Browser Usage](#browser-usage).
@@ -57,6 +59,7 @@ Most algorithms return a `result` with these fields:
57
59
  - **Common (all algorithms)**: `finalParameters`, `converged`, `iterations`, `finalCost`
58
60
  - **Gradient Descent**: `finalGradientNorm`, `usedLineSearch`
59
61
  - **BFGS / L-BFGS**: `finalGradientNorm`
62
+ - **CMA-ES**: `functionEvaluations`, `finalStepSize`, `finalMaxStdDev`
60
63
  - **Gauss-Newton / Levenberg–Marquardt**: `finalResidualNorm` (and LM also has `finalLambda`)
61
64
  - **Constrained algorithms / Adjoint**: `finalStates`, `finalConstraintNorm`
62
65
 
@@ -140,6 +143,48 @@ Run:
140
143
  node quick.cjs
141
144
  ```
142
145
 
146
+ ## CMA-ES (Black-box Optimization)
147
+
148
+ Use CMA-ES when your cost function is a **black box** and you **don’t have a gradient**.
149
+
150
+ Key options to set:
151
+ - `initialStepSize` (sigma0): the initial standard deviation of the search distribution — roughly your expected distance from the optimum
152
+ - `randomSeed`: set this for reproducible runs (recommended for testing and debugging)
153
+ - `restartStrategy`: use `"ipop"` for multi-modal problems (λ doubles per restart)
154
+ - `profiling`: enable lightweight timing breakdown in the result
155
+
156
+ ```js
157
+ import { cmaEs } from 'numopt-js';
158
+
159
+ const sphere = (params) => params.reduce((sum, v) => sum + v * v, 0);
160
+
161
+ const result = cmaEs(new Float64Array([10, -7, 3, 5]), sphere, {
162
+ maxIterations: 200,
163
+ populationSize: 20,
164
+ initialStepSize: 2.0,
165
+ randomSeed: 123456,
166
+ targetCost: 1e-10,
167
+ restartStrategy: "none",
168
+ profiling: true,
169
+ });
170
+
171
+ console.log(result.finalParameters, result.finalCost, result.stopReason, result.profiling);
172
+ ```
173
+
174
+ ### IPOP (Restart Strategy)
175
+
176
+ For multi-modal problems (e.g., Rastrigin), use IPOP restarts:
177
+
178
+ ```js
179
+ const rastrigin = (p) => 10 * p.length + p.reduce((s, v) => s + v * v - 10 * Math.cos(2 * Math.PI * v), 0);
+ const result = cmaEs(new Float64Array([5, 5, 5, 5, 5, 5, 5, 5, 5, 5]), rastrigin, {
180
+ maxIterations: 1200,
181
+ populationSize: 20,
182
+ initialStepSize: 2.5,
183
+ randomSeed: 123456,
184
+ restartStrategy: "ipop",
185
+ });
186
+ ```
187
+
143
188
  ## Node Usage (ESM + CommonJS)
144
189
 
145
190
  numopt-js supports both ESM (`import`) and CommonJS (`require`) in Node.js.
@@ -0,0 +1,17 @@
1
+ /**
2
+ * This file implements vanilla CMA-ES and IPOP-CMA-ES restart strategy
3
+ * for unconstrained black-box optimization (no gradients required).
4
+ *
5
+ * Role in system:
6
+ * - Provides a derivative-free optimizer for scalar cost functions
7
+ * - Adds IPOP restarts (λ doubles per restart) while preserving libcmaes semantics
8
+ * - Mirrors libcmaes default parameter formulas and core stop criteria
9
+ *
10
+ * For first-time readers:
11
+ * - Start with `cmaEs()` (public entry point)
12
+ * - `runSingleCmaEs()` executes one CMA-ES run (no restarts)
13
+ * - Restart logic wraps `runSingleCmaEs()` when `restartStrategy: "ipop"`
14
+ */
15
+ import type { CostFn, CmaEsOptions, CmaEsResult } from './types.js';
16
+ export declare function cmaEs(initialParameters: Float64Array, costFunction: CostFn, options?: CmaEsOptions): CmaEsResult;
17
+ //# sourceMappingURL=cmaEs.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cmaEs.d.ts","sourceRoot":"","sources":["../../src/core/cmaEs.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAGH,OAAO,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAyvBpE,wBAAgB,KAAK,CACnB,iBAAiB,EAAE,YAAY,EAC/B,YAAY,EAAE,MAAM,EACpB,OAAO,GAAE,YAAiB,GACzB,WAAW,CAsIb"}