numopt-js 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CODING_RULES.md +161 -0
- package/LICENSE +22 -0
- package/README.md +807 -0
- package/dist/core/adjointGradientDescent.d.ts +61 -0
- package/dist/core/adjointGradientDescent.d.ts.map +1 -0
- package/dist/core/adjointGradientDescent.js +764 -0
- package/dist/core/adjointGradientDescent.js.map +1 -0
- package/dist/core/constrainedGaussNewton.d.ts +44 -0
- package/dist/core/constrainedGaussNewton.d.ts.map +1 -0
- package/dist/core/constrainedGaussNewton.js +314 -0
- package/dist/core/constrainedGaussNewton.js.map +1 -0
- package/dist/core/constrainedLevenbergMarquardt.d.ts +46 -0
- package/dist/core/constrainedLevenbergMarquardt.d.ts.map +1 -0
- package/dist/core/constrainedLevenbergMarquardt.js +469 -0
- package/dist/core/constrainedLevenbergMarquardt.js.map +1 -0
- package/dist/core/constrainedUtils.d.ts +92 -0
- package/dist/core/constrainedUtils.d.ts.map +1 -0
- package/dist/core/constrainedUtils.js +364 -0
- package/dist/core/constrainedUtils.js.map +1 -0
- package/dist/core/convergence.d.ts +35 -0
- package/dist/core/convergence.d.ts.map +1 -0
- package/dist/core/convergence.js +51 -0
- package/dist/core/convergence.js.map +1 -0
- package/dist/core/createGradientFunction.d.ts +85 -0
- package/dist/core/createGradientFunction.d.ts.map +1 -0
- package/dist/core/createGradientFunction.js +93 -0
- package/dist/core/createGradientFunction.js.map +1 -0
- package/dist/core/effectiveJacobian.d.ts +90 -0
- package/dist/core/effectiveJacobian.d.ts.map +1 -0
- package/dist/core/effectiveJacobian.js +128 -0
- package/dist/core/effectiveJacobian.js.map +1 -0
- package/dist/core/finiteDiff.d.ts +171 -0
- package/dist/core/finiteDiff.d.ts.map +1 -0
- package/dist/core/finiteDiff.js +363 -0
- package/dist/core/finiteDiff.js.map +1 -0
- package/dist/core/gaussNewton.d.ts +29 -0
- package/dist/core/gaussNewton.d.ts.map +1 -0
- package/dist/core/gaussNewton.js +151 -0
- package/dist/core/gaussNewton.js.map +1 -0
- package/dist/core/gradientDescent.d.ts +35 -0
- package/dist/core/gradientDescent.d.ts.map +1 -0
- package/dist/core/gradientDescent.js +204 -0
- package/dist/core/gradientDescent.js.map +1 -0
- package/dist/core/jacobianComputation.d.ts +24 -0
- package/dist/core/jacobianComputation.d.ts.map +1 -0
- package/dist/core/jacobianComputation.js +38 -0
- package/dist/core/jacobianComputation.js.map +1 -0
- package/dist/core/levenbergMarquardt.d.ts +36 -0
- package/dist/core/levenbergMarquardt.d.ts.map +1 -0
- package/dist/core/levenbergMarquardt.js +286 -0
- package/dist/core/levenbergMarquardt.js.map +1 -0
- package/dist/core/lineSearch.d.ts +42 -0
- package/dist/core/lineSearch.d.ts.map +1 -0
- package/dist/core/lineSearch.js +106 -0
- package/dist/core/lineSearch.js.map +1 -0
- package/dist/core/logger.d.ts +77 -0
- package/dist/core/logger.d.ts.map +1 -0
- package/dist/core/logger.js +162 -0
- package/dist/core/logger.js.map +1 -0
- package/dist/core/types.d.ts +427 -0
- package/dist/core/types.d.ts.map +1 -0
- package/dist/core/types.js +15 -0
- package/dist/core/types.js.map +1 -0
- package/dist/index.d.ts +26 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +29 -0
- package/dist/index.js.map +1 -0
- package/dist/utils/formatting.d.ts +27 -0
- package/dist/utils/formatting.d.ts.map +1 -0
- package/dist/utils/formatting.js +54 -0
- package/dist/utils/formatting.js.map +1 -0
- package/dist/utils/matrix.d.ts +63 -0
- package/dist/utils/matrix.d.ts.map +1 -0
- package/dist/utils/matrix.js +129 -0
- package/dist/utils/matrix.js.map +1 -0
- package/dist/utils/resultFormatter.d.ts +122 -0
- package/dist/utils/resultFormatter.d.ts.map +1 -0
- package/dist/utils/resultFormatter.js +342 -0
- package/dist/utils/resultFormatter.js.map +1 -0
- package/package.json +74 -0
|
@@ -0,0 +1,469 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* This file implements the constrained Levenberg-Marquardt algorithm for solving
|
|
3
|
+
* nonlinear least squares problems with constraints.
|
|
4
|
+
*
|
|
5
|
+
* The constrained Levenberg-Marquardt method uses the effective Jacobian concept:
|
|
6
|
+
* J_eff = r_p - r_x C_x^+ C_p, which captures all constraint effects.
|
|
7
|
+
* This allows the algorithm to use the same structure as unconstrained
|
|
8
|
+
* Levenberg-Marquardt: (J_eff^T J_eff + λI) δ = -J_eff^T r
|
|
9
|
+
*
|
|
10
|
+
* Role in system:
|
|
11
|
+
* - Constrained version of Levenberg-Marquardt method
|
|
12
|
+
* - Uses effective Jacobian computed via adjoint method
|
|
13
|
+
* - Updates both parameters and states to maintain constraint satisfaction
|
|
14
|
+
* - More robust than constrained Gauss-Newton (handles singular matrices)
|
|
15
|
+
*
|
|
16
|
+
* For first-time readers:
|
|
17
|
+
* - Start with constrainedLevenbergMarquardt function
|
|
18
|
+
* - Understand how effective Jacobian replaces regular Jacobian
|
|
19
|
+
* - Check lambda update strategy and damping mechanism
|
|
20
|
+
*/
|
|
21
|
+
import { Matrix, CholeskyDecomposition } from 'ml-matrix';
|
|
22
|
+
import { float64ArrayToMatrix, matrixToFloat64Array, vectorNorm, computeSumOfSquaredResiduals } from '../utils/matrix.js';
|
|
23
|
+
import { checkGradientConvergence, checkStepSizeConvergence, checkResidualConvergence } from './convergence.js';
|
|
24
|
+
import { computeEffectiveJacobian } from './effectiveJacobian.js';
|
|
25
|
+
import { updateStates, validateInitialConditions, projectStatesToConstraints } from './constrainedUtils.js';
|
|
26
|
+
import { Logger } from './logger.js';
|
|
27
|
+
import { finiteDiffConstraintPartialP, finiteDiffConstraintPartialX } from './finiteDiff.js';
|
|
28
|
+
// Iteration cap for the main optimization loop.
const DEFAULT_MAX_ITERATIONS = 1000;
// Initial damping parameter λ (small value → near Gauss-Newton behavior at the start).
const DEFAULT_LAMBDA_INITIAL = 1e-3;
// Multiplicative factor: λ is divided by this on accepted steps, multiplied on rejected ones.
const DEFAULT_LAMBDA_FACTOR = 10.0;
// Convergence tolerance on the gradient norm ||J_eff^T r||.
const DEFAULT_TOL_GRADIENT = 1e-6;
// Convergence tolerance on the parameter step norm ||δ||.
const DEFAULT_TOL_STEP = 1e-6;
// Convergence tolerance on the residual norm ||r||.
const DEFAULT_TOL_RESIDUAL = 1e-6;
// Maximum allowed ||c(p,x)|| for constraints to count as satisfied.
const DEFAULT_CONSTRAINT_TOLERANCE = 1e-6;
// Finite-difference step size for perturbing parameters p.
const DEFAULT_STEP_SIZE_P = 1e-6;
// Finite-difference step size for perturbing states x.
const DEFAULT_STEP_SIZE_X = 1e-6;
const MAXIMUM_LAMBDA_THRESHOLD = 1e10; // Maximum lambda before giving up (prevents infinite loop)
const NEGATIVE_COEFFICIENT = -1.0; // Coefficient for negative right-hand side in damped normal equations: (J_eff^T J_eff + λI) δ = -J_eff^T r
|
|
39
|
+
/**
 * Assembles the normal-equation matrices from the effective Jacobian.
 *
 * Produces J_eff^T J_eff (the Gauss-Newton Hessian approximation) and
 * J_eff^T r (the gradient of the least-squares cost), the two ingredients
 * of the damped system (J_eff^T J_eff + λI) δ = -J_eff^T r.
 */
function computeNormalEquationsMatrices(effectiveJacobian, residual) {
    const transposed = effectiveJacobian.transpose();
    // Convert the raw residual vector to a column matrix for multiplication.
    const residualColumn = float64ArrayToMatrix(residual);
    return {
        jtj: transposed.mmul(effectiveJacobian),
        jtr: transposed.mmul(residualColumn)
    };
}
|
|
50
|
+
/**
 * Assembles the result object returned by constrained Levenberg-Marquardt.
 *
 * Single construction point so every exit path (gradient/step/residual
 * convergence, max iterations) reports the same shape. Note `iterations`
 * is reported as the 1-based count, i.e. `iteration + 1`.
 */
function createConvergenceResultForLM(parameters, states, iteration, converged, finalCost, finalGradientNorm, finalResidualNorm, finalConstraintNorm, finalLambda) {
    const result = {
        parameters,
        iterations: iteration + 1,
        converged,
        finalCost,
        finalGradientNorm,
        finalResidualNorm,
        finalLambda,
        finalStates: states,
        finalConstraintNorm
    };
    return result;
}
|
|
67
|
+
/**
 * Measures the current constraint violation ||c(p, x)|| and emits a warning
 * through the logger when it exceeds the tolerance. Purely diagnostic: the
 * norm is returned either way and the caller decides what to do with it.
 */
function checkConstraintViolation(currentParameters, currentStates, constraintFunction, constraintTolerance, iteration, logger) {
    const constraintNorm = vectorNorm(constraintFunction(currentParameters, currentStates));
    const violated = constraintNorm > constraintTolerance;
    if (violated) {
        logger.warn('constrainedLevenbergMarquardt', iteration, 'Constraint violation detected', [
            { key: '||c(p,x)||:', value: constraintNorm },
            { key: 'Tolerance:', value: constraintTolerance }
        ]);
    }
    return { constraintNorm };
}
|
|
81
|
+
/**
 * Solves the damped normal equations (J_eff^T J_eff + λI) δ = -J_eff^T r
 * for the Levenberg-Marquardt step δ.
 *
 * The damping parameter lambda interpolates between Gauss-Newton (λ→0) and
 * gradient descent (λ→∞).
 *
 * Returns { step, stepNorm } on success, or { newLambda } (lambda increased
 * by lambdaFactor) when the damped Hessian is not positive definite or the
 * Cholesky decomposition throws.
 *
 * NOTE: ml-matrix instance arithmetic (`add`, `mul`) operates IN PLACE.
 * The caller reuses the same `jtj`/`jtr` matrices across lambda retries, so
 * we must clone before modifying: mutating `jtj` would accumulate λI from
 * every retry, and mutating `jtr` would flip the sign of the right-hand side
 * (and of the cached gradient) on each call.
 */
function solveDampedNormalEquations(jtj, jtr, currentLambda, lambdaFactor) {
    const parameterCount = jtj.rows;
    const identity = Matrix.eye(parameterCount, parameterCount);
    // Clone so the caller's jtj/jtr stay untouched; `identity` is fresh, so
    // scaling it in place is safe.
    const dampedHessian = jtj.clone().add(identity.mul(currentLambda));
    const negativeJtr = jtr.clone().mul(NEGATIVE_COEFFICIENT);
    let stepMatrix;
    try {
        const cholesky = new CholeskyDecomposition(dampedHessian);
        if (cholesky.isPositiveDefinite()) {
            stepMatrix = cholesky.solve(negativeJtr);
        }
        else {
            // Not positive definite: signal the caller to retry with more damping.
            const newLambda = currentLambda * lambdaFactor;
            return { newLambda };
        }
    }
    catch (choleskyError) {
        // Decomposition failure also indicates ill-conditioning; increase lambda.
        const newLambda = currentLambda * lambdaFactor;
        return { newLambda };
    }
    const step = matrixToFloat64Array(stepMatrix);
    const stepNorm = vectorNorm(step);
    return { step, stepNorm };
}
|
|
110
|
+
/**
|
|
111
|
+
* Evaluates step quality by comparing new cost to current cost.
|
|
112
|
+
* Returns acceptance result with updated lambda based on cost improvement.
|
|
113
|
+
*/
|
|
114
|
+
function evaluateStepQuality(newParameters, newStates, newCost, currentCost, currentLambda, lambdaFactor, iteration, logger) {
|
|
115
|
+
if (newCost < currentCost) {
|
|
116
|
+
const newLambda = currentLambda / lambdaFactor;
|
|
117
|
+
logger.debug('constrainedLevenbergMarquardt', iteration, 'Step accepted', [
|
|
118
|
+
{ key: 'Cost:', value: currentCost },
|
|
119
|
+
{ key: 'New cost:', value: newCost },
|
|
120
|
+
{ key: 'Lambda:', value: newLambda }
|
|
121
|
+
]);
|
|
122
|
+
return { stepAccepted: true, newParameters, newStates, newLambda };
|
|
123
|
+
}
|
|
124
|
+
const newLambda = currentLambda * lambdaFactor;
|
|
125
|
+
logger.debug('constrainedLevenbergMarquardt', iteration, 'Step rejected', [
|
|
126
|
+
{ key: 'Cost:', value: currentCost },
|
|
127
|
+
{ key: 'New cost:', value: newCost },
|
|
128
|
+
{ key: 'Lambda:', value: newLambda }
|
|
129
|
+
]);
|
|
130
|
+
return { stepAccepted: false, newParameters, newStates, newLambda };
|
|
131
|
+
}
|
|
132
|
+
/**
 * Attempts one constrained Levenberg-Marquardt step at the given lambda.
 *
 * Pipeline: solve (J_eff^T J_eff + λI) δ = -J_eff^T r for the parameter step,
 * propagate the step into the states via the constraint partials C_x and C_p,
 * project the states back toward the constraint manifold, then accept or
 * reject based on the new cost.
 *
 * Return shapes (callers dispatch on the optional fields):
 * - { stepAccepted, newParameters, newStates, newLambda } — step was evaluated;
 * - { stepAccepted: false, newLambda } — solve failed, retry with larger λ;
 * - { stepAccepted: false, newLambda, stepNorm } — step below tolStep (possible convergence);
 * - { stepAccepted: false, newLambda, shouldStop: true } — λ hit the hard limit.
 */
function tryConstrainedLevenbergMarquardtStep(jtj, jtr, currentParameters, currentStates, currentLambda, lambdaFactor, residualFunction, constraintFunction, currentCost, tolStep, iteration, stepSizeP, stepSizeX, constraintTolerance, logger, dcdp, dcdx) {
    // Lambda threshold prevents infinite loops when matrix is severely ill-conditioned
    if (currentLambda >= MAXIMUM_LAMBDA_THRESHOLD) {
        logger.warn('constrainedLevenbergMarquardt', iteration, 'Lambda too large, stopping optimization', [
            { key: 'Lambda:', value: currentLambda },
            { key: 'Cost:', value: currentCost }
        ]);
        return { stepAccepted: false, newLambda: currentLambda, shouldStop: true };
    }
    try {
        const solveResult = solveDampedNormalEquations(jtj, jtr, currentLambda, lambdaFactor);
        // A bare { newLambda } result means the damped system could not be solved.
        if ('newLambda' in solveResult) {
            return { stepAccepted: false, newLambda: solveResult.newLambda };
        }
        const { step, stepNorm } = solveResult;
        // Tiny step: report stepNorm so the caller can decide whether this is
        // convergence (constraints satisfied) or a cue to increase damping.
        if (checkStepSizeConvergence(stepNorm, tolStep, iteration)) {
            const newLambda = currentLambda * lambdaFactor;
            return { stepAccepted: false, newLambda, stepNorm };
        }
        // Trial parameters: p_new = p + δ.
        const newParameters = new Float64Array(currentParameters.length);
        for (let i = 0; i < currentParameters.length; i++) {
            newParameters[i] = currentParameters[i] + step[i];
        }
        // Constraint partials: use user-supplied analytic Jacobians when given,
        // otherwise fall back to finite differences.
        const c_x = dcdx
            ? dcdx(currentParameters, currentStates)
            : finiteDiffConstraintPartialX(currentParameters, currentStates, constraintFunction, { stepSize: stepSizeX });
        const c_p = dcdp
            ? dcdp(currentParameters, currentStates)
            : finiteDiffConstraintPartialP(currentParameters, currentStates, constraintFunction, { stepSize: stepSizeP });
        // Linearized state update, then projection back toward c(p, x) = 0.
        const newStates = updateStates(currentStates, c_x, c_p, step, logger, 'constrainedLevenbergMarquardt');
        const projectedStates = projectStatesToConstraints(newParameters, newStates, constraintFunction, stepSizeX, constraintTolerance, logger, 'constrainedLevenbergMarquardt');
        // Cost at the trial point decides acceptance and the new lambda.
        const newResidual = residualFunction(newParameters, projectedStates);
        const newResidualNorm = vectorNorm(newResidual);
        const newCost = computeSumOfSquaredResiduals(newResidualNorm);
        const evaluationResult = evaluateStepQuality(newParameters, projectedStates, newCost, currentCost, currentLambda, lambdaFactor, iteration, logger);
        return {
            stepAccepted: evaluationResult.stepAccepted,
            newParameters,
            newStates: evaluationResult.newStates,
            newLambda: evaluationResult.newLambda
        };
    }
    catch (error) {
        // Numerical issues indicate ill-conditioning: increasing lambda improves conditioning
        const newLambda = currentLambda * lambdaFactor;
        if (logger) {
            logger.warn('constrainedLevenbergMarquardt', iteration, 'Singular matrix encountered, increasing lambda', [
                { key: 'Lambda:', value: newLambda },
                { key: 'Cost:', value: currentCost }
            ]);
        }
        return { stepAccepted: false, newLambda };
    }
}
|
|
190
|
+
/**
 * Gradient-based convergence test for constrained Levenberg-Marquardt.
 *
 * Convergence is only declared when the constraints are satisfied AND the
 * gradient norm passes `checkGradientConvergence`. Returns the completed
 * result object on convergence, otherwise null.
 */
function checkConvergenceForLM(gradientNorm, constraintSatisfied, tolGradient, iteration, currentParameters, currentStates, cost, residualNorm, constraintNorm, currentLambda, logger) {
    // Guard clause: bail out unless both criteria hold. Note the short-circuit
    // mirrors the original — the gradient check only runs when constraints hold.
    if (!constraintSatisfied || !checkGradientConvergence(gradientNorm, tolGradient, iteration)) {
        return null;
    }
    logger.info('constrainedLevenbergMarquardt', iteration, 'Converged', [
        { key: 'Cost:', value: cost },
        { key: 'Gradient norm:', value: gradientNorm },
        { key: 'Residual norm:', value: residualNorm },
        { key: 'Constraint norm:', value: constraintNorm },
        { key: 'Lambda:', value: currentLambda }
    ]);
    return createConvergenceResultForLM(currentParameters, currentStates, iteration, true, cost, gradientNorm, residualNorm, constraintNorm, currentLambda);
}
|
|
207
|
+
/**
 * Retry loop: attempts LM steps with progressively larger lambda until one
 * is accepted, the step size signals convergence, or lambda hits the cap.
 *
 * Return shapes:
 * - { stepAccepted: true, updatedParameters, updatedStates, updatedLambda } — step found;
 * - { stepAccepted: false, updatedLambda, stepSizeConverged: true } — converged by step size;
 * - { stepAccepted: false, updatedLambda, shouldStop: true } — give up (λ exhausted);
 * - { stepAccepted: false, updatedLambda } — loop exited without acceptance.
 */
function tryStepWithLambda(jtj, jtr, currentParameters, currentStates, currentLambda, lambdaFactor, residualFunction, constraintFunction, cost, tolStep, iteration, stepSizeP, stepSizeX, constraintTolerance, constraintSatisfied, gradientNorm, residualNorm, constraintNorm, logger, dcdp, dcdx) {
    let stepAccepted = false;
    let updatedLambda = currentLambda;
    let updatedParameters;
    let updatedStates;
    // Each pass reuses the same jtj/jtr but with a larger damping parameter.
    while (!stepAccepted && updatedLambda < MAXIMUM_LAMBDA_THRESHOLD) {
        const stepResult = tryConstrainedLevenbergMarquardtStep(jtj, jtr, currentParameters, currentStates, updatedLambda, lambdaFactor, residualFunction, constraintFunction, cost, tolStep, iteration, stepSizeP, stepSizeX, constraintTolerance, logger, dcdp, dcdx);
        if (stepResult.shouldStop) {
            return { stepAccepted: false, updatedLambda, shouldStop: true };
        }
        // A tiny step only counts as convergence when the constraints are
        // already satisfied; otherwise we fall through and keep increasing λ.
        if (stepResult.stepNorm !== undefined &&
            constraintSatisfied &&
            checkStepSizeConvergence(stepResult.stepNorm, tolStep, iteration)) {
            logger.info('constrainedLevenbergMarquardt', iteration, 'Converged', [
                { key: 'Cost:', value: cost },
                { key: 'Gradient norm:', value: gradientNorm },
                { key: 'Residual norm:', value: residualNorm },
                { key: 'Step size:', value: stepResult.stepNorm },
                { key: 'Constraint norm:', value: constraintNorm },
                { key: 'Lambda:', value: updatedLambda }
            ]);
            return { stepAccepted: false, updatedLambda, stepSizeConverged: true };
        }
        // Adopt the adjusted lambda regardless of acceptance (decreased on
        // accepted steps, increased on rejected ones).
        updatedLambda = stepResult.newLambda;
        if (stepResult.stepAccepted && stepResult.newParameters && stepResult.newStates) {
            updatedParameters = stepResult.newParameters;
            updatedStates = stepResult.newStates;
            stepAccepted = true;
        }
    }
    if (!stepAccepted && updatedLambda >= MAXIMUM_LAMBDA_THRESHOLD) {
        logger.warn('constrainedLevenbergMarquardt', iteration, 'Could not find acceptable step even with maximum lambda. Stopping optimization.', [
            { key: 'Lambda:', value: updatedLambda },
            { key: 'Cost:', value: cost }
        ]);
        return { stepAccepted: false, updatedLambda, shouldStop: true };
    }
    return { stepAccepted, updatedParameters, updatedStates, updatedLambda };
}
|
|
250
|
+
/**
 * Post-processes an accepted step: recomputes cost/constraint at the new
 * point and tests residual-based convergence.
 *
 * Returns { converged: true, result, bestCost, bestParameters, bestStates }
 * when both the constraint tolerance and the residual tolerance are met,
 * otherwise { converged: false, ... } carrying the new point as the best
 * candidate so far.
 */
function processStepResult(updatedParameters, updatedStates, updatedLambda, residualFunction, constraintFunction, constraintTolerance, tolResidual, iteration, gradientNorm, logger) {
    const residualNorm = vectorNorm(residualFunction(updatedParameters, updatedStates));
    const cost = computeSumOfSquaredResiduals(residualNorm);
    const constraintNorm = vectorNorm(constraintFunction(updatedParameters, updatedStates));
    const candidate = {
        bestCost: cost,
        bestParameters: updatedParameters,
        bestStates: updatedStates
    };
    // Residual convergence only counts when the constraints are satisfied;
    // the residual check is skipped otherwise (matches original short-circuit).
    const withinConstraint = constraintNorm <= constraintTolerance;
    if (!withinConstraint || !checkResidualConvergence(residualNorm, tolResidual, iteration)) {
        return { converged: false, ...candidate };
    }
    logger.info('constrainedLevenbergMarquardt', iteration, 'Converged', [
        { key: 'Cost:', value: cost },
        { key: 'Gradient norm:', value: gradientNorm },
        { key: 'Residual norm:', value: residualNorm },
        { key: 'Constraint norm:', value: constraintNorm },
        { key: 'Lambda:', value: updatedLambda }
    ]);
    return {
        converged: true,
        result: createConvergenceResultForLM(updatedParameters, updatedStates, iteration, true, cost, gradientNorm, residualNorm, constraintNorm, updatedLambda),
        ...candidate
    };
}
|
|
283
|
+
/**
 * Runs one full iteration of constrained Levenberg-Marquardt.
 *
 * Order of operations: diagnose constraint violation → evaluate residual/cost
 * (and fire the onIteration callback) → build effective Jacobian and normal
 * equations → test gradient convergence → search for an acceptable step with
 * increasing lambda → post-process an accepted step.
 *
 * Return shapes:
 * - { converged: true, result } — a convergence criterion fired;
 * - { converged: false, shouldStop: true } — optimization must stop;
 * - { converged: false, newParameters, newStates, newLambda, bestCost, ... } — accepted step;
 * - { converged: false, newLambda } — no step accepted this iteration.
 */
function performConstrainedLevenbergMarquardtIteration(currentParameters, currentStates, currentLambda, residualFunction, constraintFunction, effectiveJacobianOptions, tolGradient, tolStep, tolResidual, constraintTolerance, stepSizeP, stepSizeX, lambdaFactor, iteration, logger, onIteration, dcdp, dcdx) {
    // Diagnostic: warn (but continue) if the constraints are currently violated.
    const { constraintNorm } = checkConstraintViolation(currentParameters, currentStates, constraintFunction, constraintTolerance, iteration, logger);
    const residual = residualFunction(currentParameters, currentStates);
    const residualNorm = vectorNorm(residual);
    const cost = computeSumOfSquaredResiduals(residualNorm);
    // Progress callback fires once per iteration with the pre-step cost.
    if (onIteration) {
        onIteration(iteration, cost, currentParameters);
    }
    // J_eff = r_p - r_x C_x^+ C_p captures all constraint effects.
    const effectiveJacobian = computeEffectiveJacobian(currentParameters, currentStates, residualFunction, constraintFunction, effectiveJacobianOptions, logger, 'constrainedLevenbergMarquardt');
    const { jtj, jtr } = computeNormalEquationsMatrices(effectiveJacobian, residual);
    // Gradient of the least-squares cost is J_eff^T r (up to sign).
    const gradientVector = matrixToFloat64Array(jtr);
    const gradientNorm = vectorNorm(gradientVector);
    const constraintSatisfied = constraintNorm <= constraintTolerance;
    // Criterion 1: gradient convergence (requires constraints satisfied).
    const gradientConvergenceResult = checkConvergenceForLM(gradientNorm, constraintSatisfied, tolGradient, iteration, currentParameters, currentStates, cost, residualNorm, constraintNorm, currentLambda, logger);
    if (gradientConvergenceResult) {
        return {
            converged: true,
            result: gradientConvergenceResult
        };
    }
    // Search for an acceptable step, increasing lambda as needed.
    const stepResult = tryStepWithLambda(jtj, jtr, currentParameters, currentStates, currentLambda, lambdaFactor, residualFunction, constraintFunction, cost, tolStep, iteration, stepSizeP, stepSizeX, constraintTolerance, constraintSatisfied, gradientNorm, residualNorm, constraintNorm, logger, dcdp, dcdx);
    if (stepResult.shouldStop) {
        return { converged: false, shouldStop: true };
    }
    // Criterion 2: step-size convergence detected inside the retry loop.
    if (stepResult.stepSizeConverged) {
        return {
            converged: true,
            result: createConvergenceResultForLM(currentParameters, currentStates, iteration, true, cost, gradientNorm, residualNorm, constraintNorm, stepResult.updatedLambda)
        };
    }
    if (stepResult.stepAccepted && stepResult.updatedParameters && stepResult.updatedStates) {
        // Criterion 3: residual convergence, checked at the accepted point.
        const processResult = processStepResult(stepResult.updatedParameters, stepResult.updatedStates, stepResult.updatedLambda, residualFunction, constraintFunction, constraintTolerance, tolResidual, iteration, gradientNorm, logger);
        if (processResult.converged && processResult.result) {
            return {
                converged: true,
                result: processResult.result
            };
        }
        // Step accepted but not converged: hand the new point (and best-so-far
        // bookkeeping) back to the outer loop.
        return {
            converged: false,
            newParameters: processResult.bestParameters,
            newStates: processResult.bestStates,
            newLambda: stepResult.updatedLambda,
            bestCost: processResult.bestCost,
            bestParameters: processResult.bestParameters,
            bestStates: processResult.bestStates
        };
    }
    // No acceptable step this iteration; only lambda changes.
    return { converged: false, newLambda: stepResult.updatedLambda };
}
|
|
337
|
+
/**
 * Builds the mutable loop state for the optimizer.
 *
 * Defensive copies of the initial parameters/states are kept both as the
 * current iterate and as the best-so-far solution (returned if the iteration
 * budget runs out); bestCost starts as the cost at the initial point.
 */
function initializeLMState(initialParameters, initialStates, residualFunction) {
    const startNorm = vectorNorm(residualFunction(initialParameters, initialStates));
    const startCost = computeSumOfSquaredResiduals(startNorm);
    return {
        currentParameters: new Float64Array(initialParameters),
        currentStates: new Float64Array(initialStates),
        bestParameters: new Float64Array(initialParameters),
        bestStates: new Float64Array(initialStates),
        bestCost: startCost
    };
}
|
|
351
|
+
/**
 * Main iteration loop of constrained Levenberg-Marquardt.
 *
 * Drives performConstrainedLevenbergMarquardtIteration up to maxIterations
 * times, tracking the best (lowest-cost) point seen. Returns { result } if a
 * convergence criterion fired, otherwise { bestParameters, bestStates,
 * bestCost, currentLambda, actualIterations } for the caller to package as a
 * non-converged result.
 */
function runLMIterations(initialParameters, initialStates, residualFunction, constraintFunction, effectiveJacobianOptions, tolGradient, tolStep, tolResidual, constraintTolerance, stepSizeP, stepSizeX, lambdaInitial, lambdaFactor, maxIterations, logger, onIteration, dcdp, dcdx) {
    const state = initializeLMState(initialParameters, initialStates, residualFunction);
    let currentLambda = lambdaInitial;
    let actualIterations = 0;
    for (let iteration = 0; iteration < maxIterations; iteration++) {
        actualIterations = iteration + 1;
        const iterationResult = performConstrainedLevenbergMarquardtIteration(state.currentParameters, state.currentStates, currentLambda, residualFunction, constraintFunction, effectiveJacobianOptions, tolGradient, tolStep, tolResidual, constraintTolerance, stepSizeP, stepSizeX, lambdaFactor, iteration, logger, onIteration, dcdp, dcdx);
        if (iterationResult.converged && iterationResult.result) {
            return { result: iterationResult.result };
        }
        if (iterationResult.shouldStop) {
            break;
        }
        // Best-so-far tracking: remember the lowest cost seen, with defensive
        // copies so later in-place iterate updates cannot corrupt it.
        if (iterationResult.bestCost !== undefined && iterationResult.bestCost < state.bestCost) {
            state.bestCost = iterationResult.bestCost;
            if (iterationResult.bestParameters && iterationResult.bestStates) {
                state.bestParameters = new Float64Array(iterationResult.bestParameters);
                state.bestStates = new Float64Array(iterationResult.bestStates);
            }
        }
        // Carry the adjusted damping into the next iteration.
        if (iterationResult.newLambda !== undefined) {
            currentLambda = iterationResult.newLambda;
        }
        // Advance the iterate only when a step was actually accepted.
        if (iterationResult.newParameters && iterationResult.newStates) {
            state.currentParameters = iterationResult.newParameters;
            state.currentStates = iterationResult.newStates;
        }
    }
    return {
        bestParameters: state.bestParameters,
        bestStates: state.bestStates,
        bestCost: state.bestCost,
        currentLambda,
        actualIterations
    };
}
|
|
391
|
+
/**
 * Packages the best solution found into a non-converged result.
 *
 * Recomputes residual, constraint, and gradient norms at the best point for
 * diagnostic purposes (building the gradient requires one more effective
 * Jacobian evaluation). The reported iteration count is `actualIterations`,
 * hence the `- 1` before createConvergenceResultForLM adds one back.
 */
function createFinalLMResult(bestParameters, bestStates, bestCost, currentLambda, actualIterations, residualFunction, constraintFunction, effectiveJacobianOptions, logger) {
    const residualVec = residualFunction(bestParameters, bestStates);
    const residualNorm = vectorNorm(residualVec);
    const constraintNorm = vectorNorm(constraintFunction(bestParameters, bestStates));
    const jacobian = computeEffectiveJacobian(bestParameters, bestStates, residualFunction, constraintFunction, effectiveJacobianOptions, logger, 'constrainedLevenbergMarquardt');
    const { jtr } = computeNormalEquationsMatrices(jacobian, residualVec);
    const gradientNorm = vectorNorm(matrixToFloat64Array(jtr));
    return createConvergenceResultForLM(bestParameters, bestStates, actualIterations - 1, false, bestCost, gradientNorm, residualNorm, constraintNorm, currentLambda);
}
|
|
405
|
+
/**
 * Performs constrained Levenberg-Marquardt optimization for nonlinear least squares problems.
 *
 * Algorithm:
 * 1. Start with initial parameters p0, states x0, and lambda (damping parameter)
 * 2. Compute effective Jacobian J_eff = r_p - r_x C_x^+ C_p
 * 3. Solve damped normal equations: (J_eff^T J_eff + λI) δ = -J_eff^T r
 * 4. Try step: p_new = p_old + δ, x_new updated using linear approximation
 * 5. If cost decreases: accept step, decrease lambda
 * 6. If cost increases: reject step, increase lambda
 * 7. Repeat until convergence
 *
 * The damping parameter lambda interpolates between:
 * - Constrained Gauss-Newton (λ → 0): fast convergence near solution
 * - Constrained gradient descent (λ → ∞): robust but slow
 *
 * @param initialParameters - Initial parameter vector p0
 * @param initialStates - Initial state vector x0 (should satisfy c(p0, x0) = 0)
 * @param residualFunction - Residual function r(p, x)
 * @param constraintFunction - Constraint function c(p, x) = 0
 * @param options - Optimization options
 * @returns Optimization result with final parameters, states, constraint norm, and lambda
 */
export function constrainedLevenbergMarquardt(initialParameters, initialStates, residualFunction, constraintFunction, options = {}) {
    // Resolve options against defaults; ?? keeps explicit falsy values (e.g. 0) intact.
    const maxIterations = options.maxIterations ?? DEFAULT_MAX_ITERATIONS;
    const lambdaInitial = options.lambdaInitial ?? DEFAULT_LAMBDA_INITIAL;
    const lambdaFactor = options.lambdaFactor ?? DEFAULT_LAMBDA_FACTOR;
    const tolGradient = options.tolGradient ?? DEFAULT_TOL_GRADIENT;
    const tolStep = options.tolStep ?? DEFAULT_TOL_STEP;
    const tolResidual = options.tolResidual ?? DEFAULT_TOL_RESIDUAL;
    const constraintTolerance = options.constraintTolerance ?? DEFAULT_CONSTRAINT_TOLERANCE;
    const stepSizeP = options.stepSizeP ?? DEFAULT_STEP_SIZE_P;
    const stepSizeX = options.stepSizeX ?? DEFAULT_STEP_SIZE_X;
    const onIteration = options.onIteration;
    const logger = new Logger(options.logLevel, options.verbose);
    // Warn early when (p0, x0) does not satisfy the constraints.
    validateInitialConditions(initialParameters, initialStates, constraintFunction, constraintTolerance, logger, 'constrainedLevenbergMarquardt');
    // Analytic Jacobians (if supplied) and finite-difference step sizes for J_eff.
    const effectiveJacobianOptions = {
        drdp: options.drdp,
        drdx: options.drdx,
        dcdp: options.dcdp,
        dcdx: options.dcdx,
        stepSizeP,
        stepSizeX
    };
    const iterationResult = runLMIterations(initialParameters, initialStates, residualFunction, constraintFunction, effectiveJacobianOptions, tolGradient, tolStep, tolResidual, constraintTolerance, stepSizeP, stepSizeX, lambdaInitial, lambdaFactor, maxIterations, logger, onIteration, options.dcdp, options.dcdx);
    if ('result' in iterationResult) {
        return iterationResult.result;
    }
    // Iteration budget exhausted: build the final diagnostic result ONCE and log
    // from its fields. (Previously the effective Jacobian and gradient norm were
    // recomputed a second time solely for this warning message, doubling the most
    // expensive computation on the non-converged exit path.)
    const finalResult = createFinalLMResult(iterationResult.bestParameters, iterationResult.bestStates, iterationResult.bestCost, iterationResult.currentLambda, iterationResult.actualIterations, residualFunction, constraintFunction, effectiveJacobianOptions, logger);
    logger.warn('constrainedLevenbergMarquardt', undefined, 'Maximum iterations reached', [
        { key: 'Iterations:', value: iterationResult.actualIterations },
        { key: 'Final cost:', value: iterationResult.bestCost },
        { key: 'Final gradient norm:', value: finalResult.finalGradientNorm },
        { key: 'Final residual norm:', value: finalResult.finalResidualNorm },
        { key: 'Final constraint norm:', value: finalResult.finalConstraintNorm },
        { key: 'Final lambda:', value: iterationResult.currentLambda }
    ]);
    return finalResult;
}
|
|
469
|
+
//# sourceMappingURL=constrainedLevenbergMarquardt.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"constrainedLevenbergMarquardt.js","sourceRoot":"","sources":["../../src/core/constrainedLevenbergMarquardt.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;GAmBG;AAEH,OAAO,EAAE,MAAM,EAAE,qBAAqB,EAAE,MAAM,WAAW,CAAC;AAO1D,OAAO,EAAE,oBAAoB,EAAE,oBAAoB,EAAE,UAAU,EAAE,4BAA4B,EAAE,MAAM,oBAAoB,CAAC;AAC1H,OAAO,EAAE,wBAAwB,EAAE,wBAAwB,EAAE,wBAAwB,EAAE,MAAM,kBAAkB,CAAC;AAChH,OAAO,EAAE,wBAAwB,EAAiC,MAAM,wBAAwB,CAAC;AACjG,OAAO,EAAE,YAAY,EAAE,yBAAyB,EAAE,0BAA0B,EAAE,MAAM,uBAAuB,CAAC;AAC5G,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,OAAO,EACL,4BAA4B,EAC5B,4BAA4B,EAC7B,MAAM,iBAAiB,CAAC;AAEzB,MAAM,sBAAsB,GAAG,IAAI,CAAC;AACpC,MAAM,sBAAsB,GAAG,IAAI,CAAC;AACpC,MAAM,qBAAqB,GAAG,IAAI,CAAC;AACnC,MAAM,oBAAoB,GAAG,IAAI,CAAC;AAClC,MAAM,gBAAgB,GAAG,IAAI,CAAC;AAC9B,MAAM,oBAAoB,GAAG,IAAI,CAAC;AAClC,MAAM,4BAA4B,GAAG,IAAI,CAAC;AAC1C,MAAM,mBAAmB,GAAG,IAAI,CAAC;AACjC,MAAM,mBAAmB,GAAG,IAAI,CAAC;AACjC,MAAM,wBAAwB,GAAG,IAAI,CAAC,CAAC,2DAA2D;AAClG,MAAM,oBAAoB,GAAG,CAAC,GAAG,CAAC,CAAC,2GAA2G;AAE9I;;;GAGG;AACH,SAAS,8BAA8B,CACrC,iBAAyB,EACzB,QAAsB;IAEtB,MAAM,iBAAiB,GAAG,iBAAiB,CAAC,SAAS,EAAE,CAAC;IACxD,MAAM,GAAG,GAAG,iBAAiB,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;IACtD,MAAM,cAAc,GAAG,oBAAoB,CAAC,QAAQ,CAAC,CAAC;IACtD,MAAM,GAAG,GAAG,iBAAiB,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IACnD,OAAO,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC;AACtB,CAAC;AAED;;;GAGG;AACH,SAAS,4BAA4B,CACnC,UAAwB,EACxB,MAAoB,EACpB,SAAiB,EACjB,SAAkB,EAClB,SAAiB,EACjB,iBAAyB,EACzB,iBAAyB,EACzB,mBAA2B,EAC3B,WAAmB;IAEnB,OAAO;QACL,UAAU;QACV,UAAU,EAAE,SAAS,GAAG,CAAC;QACzB,SAAS;QACT,SAAS;QACT,iBAAiB;QACjB,iBAAiB;QACjB,WAAW;QACX,WAAW,EAAE,MAAM;QACnB,mBAAmB;KACpB,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,SAAS,wBAAwB,CAC/B,iBAA+B,EAC/B,aAA2B,EAC3B,kBAAgC,EAChC,mBAA2B,EAC3B,SAAiB,EACjB,MAAc;IAEd,MAAM,UAAU,GAAG,kBAAkB,CAAC,iBAAiB,EAAE,aAAa,CAAC,CAAC;IACxE,MAAM,cAAc,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;IAC9C,IAAI,cAAc,GAAG,mBAAmB,EAAE,CAAC;QACzC,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,SAAS,EAAE,+BAA+B,EAAE;YACvF,EAAE,GAAG,EAAE,aAAa,EAAE,KAAK,EAAE,cAAc,EAAE;YAC7C,EAAE,GAAG,EAAE,YAAY,EAA
E,KAAK,EAAE,mBAAmB,EAAE;SAClD,CAAC,CAAC;IACL,CAAC;IACD,OAAO,EAAE,cAAc,EAAE,CAAC;AAC5B,CAAC;AAED;;;;GAIG;AACH,SAAS,0BAA0B,CACjC,GAAW,EACX,GAAW,EACX,aAAqB,EACrB,YAAoB;IAEpB,MAAM,cAAc,GAAG,GAAG,CAAC,IAAI,CAAC;IAChC,MAAM,QAAQ,GAAG,MAAM,CAAC,GAAG,CAAC,cAAc,EAAE,cAAc,CAAC,CAAC;IAC5D,MAAM,aAAa,GAAG,GAAG,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC;IAE3D,MAAM,WAAW,GAAG,GAAG,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;IAClD,IAAI,UAAkB,CAAC;IACvB,IAAI,CAAC;QACH,MAAM,QAAQ,GAAG,IAAI,qBAAqB,CAAC,aAAa,CAAC,CAAC;QAC1D,IAAI,QAAQ,CAAC,kBAAkB,EAAE,EAAE,CAAC;YAClC,UAAU,GAAG,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;QAC3C,CAAC;aAAM,CAAC;YACN,MAAM,SAAS,GAAG,aAAa,GAAG,YAAY,CAAC;YAC/C,OAAO,EAAE,SAAS,EAAE,CAAC;QACvB,CAAC;IACH,CAAC;IAAC,OAAO,aAAa,EAAE,CAAC;QACvB,MAAM,SAAS,GAAG,aAAa,GAAG,YAAY,CAAC;QAC/C,OAAO,EAAE,SAAS,EAAE,CAAC;IACvB,CAAC;IACD,MAAM,IAAI,GAAG,oBAAoB,CAAC,UAAU,CAAC,CAAC;IAC9C,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;IAClC,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC;AAC5B,CAAC;AAED;;;GAGG;AACH,SAAS,mBAAmB,CAC1B,aAA2B,EAC3B,SAAuB,EACvB,OAAe,EACf,WAAmB,EACnB,aAAqB,EACrB,YAAoB,EACpB,SAAiB,EACjB,MAAc;IAEd,IAAI,OAAO,GAAG,WAAW,EAAE,CAAC;QAC1B,MAAM,SAAS,GAAG,aAAa,GAAG,YAAY,CAAC;QAC/C,MAAM,CAAC,KAAK,CAAC,+BAA+B,EAAE,SAAS,EAAE,eAAe,EAAE;YACxE,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE;YACpC,EAAE,GAAG,EAAE,WAAW,EAAE,KAAK,EAAE,OAAO,EAAE;YACpC,EAAE,GAAG,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAE;SACrC,CAAC,CAAC;QACH,OAAO,EAAE,YAAY,EAAE,IAAI,EAAE,aAAa,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC;IACrE,CAAC;IAED,MAAM,SAAS,GAAG,aAAa,GAAG,YAAY,CAAC;IAC/C,MAAM,CAAC,KAAK,CAAC,+BAA+B,EAAE,SAAS,EAAE,eAAe,EAAE;QACxE,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE;QACpC,EAAE,GAAG,EAAE,WAAW,EAAE,KAAK,EAAE,OAAO,EAAE;QACpC,EAAE,GAAG,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAE;KACrC,CAAC,CAAC;IACH,OAAO,EAAE,YAAY,EAAE,KAAK,EAAE,aAAa,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,SAAS,oCAAoC,CAC3C,GAAW,EACX,GAAW,EACX,iBAA+B,EAC/B,aAA2B,EAC3B,aAAqB,EACrB,YAAoB,EACpB,gBAAuC,EACvC,kBAAgC,EAChC,WAAmB,EACnB,OAAe,EACf,SAAiB,EACjB,SAAiB,EACjB,SAAiB,EAC
jB,mBAA2B,EAC3B,MAAc,EACd,IAAiE,EACjE,IAAiE;IASjE,mFAAmF;IACnF,IAAI,aAAa,IAAI,wBAAwB,EAAE,CAAC;QAC9C,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,SAAS,EAAE,yCAAyC,EAAE;YACjG,EAAE,GAAG,EAAE,SAAS,EAAE,KAAK,EAAE,aAAa,EAAE;YACxC,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE;SACrC,CAAC,CAAC;QACH,OAAO,EAAE,YAAY,EAAE,KAAK,EAAE,SAAS,EAAE,aAAa,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC;IAC7E,CAAC;IAED,IAAI,CAAC;QACH,MAAM,WAAW,GAAG,0BAA0B,CAAC,GAAG,EAAE,GAAG,EAAE,aAAa,EAAE,YAAY,CAAC,CAAC;QAEtF,IAAI,WAAW,IAAI,WAAW,EAAE,CAAC;YAC/B,OAAO,EAAE,YAAY,EAAE,KAAK,EAAE,SAAS,EAAE,WAAW,CAAC,SAAS,EAAE,CAAC;QACnE,CAAC;QAED,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC;QAEvC,IAAI,wBAAwB,CAAC,QAAQ,EAAE,OAAO,EAAE,SAAS,CAAC,EAAE,CAAC;YAC3D,MAAM,SAAS,GAAG,aAAa,GAAG,YAAY,CAAC;YAC/C,OAAO,EAAE,YAAY,EAAE,KAAK,EAAE,SAAS,EAAE,QAAQ,EAAE,CAAC;QACtD,CAAC;QAED,MAAM,aAAa,GAAG,IAAI,YAAY,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC;QACjE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,iBAAiB,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YAClD,aAAa,CAAC,CAAC,CAAC,GAAG,iBAAiB,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;QACpD,CAAC;QAED,MAAM,GAAG,GAAG,IAAI;YACd,CAAC,CAAC,IAAI,CAAC,iBAAiB,EAAE,aAAa,CAAC;YACxC,CAAC,CAAC,4BAA4B,CAAC,iBAAiB,EAAE,aAAa,EAAE,kBAAkB,EAAE,EAAE,QAAQ,EAAE,SAAS,EAAE,CAAC,CAAC;QAChH,MAAM,GAAG,GAAG,IAAI;YACd,CAAC,CAAC,IAAI,CAAC,iBAAiB,EAAE,aAAa,CAAC;YACxC,CAAC,CAAC,4BAA4B,CAAC,iBAAiB,EAAE,aAAa,EAAE,kBAAkB,EAAE,EAAE,QAAQ,EAAE,SAAS,EAAE,CAAC,CAAC;QAEhH,MAAM,SAAS,GAAG,YAAY,CAAC,aAAa,EAAE,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,+BAA+B,CAAiB,CAAC;QAEvH,MAAM,eAAe,GAAG,0BAA0B,CAChD,aAAa,EACb,SAAS,EACT,kBAAkB,EAClB,SAAS,EACT,mBAAmB,EACnB,MAAM,EACN,+BAA+B,CAChC,CAAC;QAEF,MAAM,WAAW,GAAG,gBAAgB,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;QACrE,MAAM,eAAe,GAAG,UAAU,CAAC,WAAW,CAAC,CAAC;QAChD,MAAM,OAAO,GAAG,4BAA4B,CAAC,eAAe,CAAC,CAAC;QAE9D,MAAM,gBAAgB,GAAG,mBAAmB,CAC1C,aAAa,EACb,eAAe,EACf,OAAO,EACP,WAAW,EACX,aAAa,EACb,YAAY,EACZ,SAAS,EACT,MAAM,CACP,CAAC;QAEF,OAAO;YACL,YAAY,EAAE,gBAAgB,CAAC,YAAY;YAC3C,aAAa;YACb,SAAS,EAAE,gBAAgB,CAAC,SAAS;YACrC,SAAS,EAAE,gBAAgB,CAAC,SAAS;SACtC,CAAC
;IACJ,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,sFAAsF;QACtF,MAAM,SAAS,GAAG,aAAa,GAAG,YAAY,CAAC;QAC/C,IAAI,MAAM,EAAE,CAAC;YACX,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,SAAS,EAAE,gDAAgD,EAAE;gBACxG,EAAE,GAAG,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAE;gBACpC,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE;aACrC,CAAC,CAAC;QACL,CAAC;QACD,OAAO,EAAE,YAAY,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC;IAC5C,CAAC;AACH,CAAC;AAED;;;GAGG;AACH,SAAS,qBAAqB,CAC5B,YAAoB,EACpB,mBAA4B,EAC5B,WAAmB,EACnB,SAAiB,EACjB,iBAA+B,EAC/B,aAA2B,EAC3B,IAAY,EACZ,YAAoB,EACpB,cAAsB,EACtB,aAAqB,EACrB,MAAc;IAEd,IAAI,mBAAmB,IAAI,wBAAwB,CAAC,YAAY,EAAE,WAAW,EAAE,SAAS,CAAC,EAAE,CAAC;QAC1F,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,SAAS,EAAE,WAAW,EAAE;YACnE,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE;YAC7B,EAAE,GAAG,EAAE,gBAAgB,EAAE,KAAK,EAAE,YAAY,EAAE;YAC9C,EAAE,GAAG,EAAE,gBAAgB,EAAE,KAAK,EAAE,YAAY,EAAE;YAC9C,EAAE,GAAG,EAAE,kBAAkB,EAAE,KAAK,EAAE,cAAc,EAAE;YAClD,EAAE,GAAG,EAAE,SAAS,EAAE,KAAK,EAAE,aAAa,EAAE;SACzC,CAAC,CAAC;QACH,OAAO,4BAA4B,CACjC,iBAAiB,EACjB,aAAa,EACb,SAAS,EACT,IAAI,EACJ,IAAI,EACJ,YAAY,EACZ,YAAY,EACZ,cAAc,EACd,aAAa,CACd,CAAC;IACJ,CAAC;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;GAGG;AACH,SAAS,iBAAiB,CACxB,GAAW,EACX,GAAW,EACX,iBAA+B,EAC/B,aAA2B,EAC3B,aAAqB,EACrB,YAAoB,EACpB,gBAAuC,EACvC,kBAAgC,EAChC,IAAY,EACZ,OAAe,EACf,SAAiB,EACjB,SAAiB,EACjB,SAAiB,EACjB,mBAA2B,EAC3B,mBAA4B,EAC5B,YAAoB,EACpB,YAAoB,EACpB,cAAsB,EACtB,MAAc,EACd,IAAiE,EACjE,IAAiE;IASjE,IAAI,YAAY,GAAG,KAAK,CAAC;IACzB,IAAI,aAAa,GAAG,aAAa,CAAC;IAClC,IAAI,iBAA2C,CAAC;IAChD,IAAI,aAAuC,CAAC;IAE5C,OAAO,CAAC,YAAY,IAAI,aAAa,GAAG,wBAAwB,EAAE,CAAC;QACjE,MAAM,UAAU,GAAG,oCAAoC,CACrD,GAAG,EACH,GAAG,EACH,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,YAAY,EACZ,gBAAgB,EAChB,kBAAkB,EAClB,IAAI,EACJ,OAAO,EACP,SAAS,EACT,SAAS,EACT,SAAS,EACT,mBAAmB,EACnB,MAAM,EACN,IAAI,EACJ,IAAI,CACL,CAAC;QAEF,IAAI,UAAU,CAAC,UAAU,EAAE,CAAC;YAC1B,OAAO,EAAE,YAAY,EAAE,KAAK,EAAE,aAAa,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC;QAClE,CAAC;QAED,IACE,UAAU,CAAC,QAAQ,KAAK,SAAS;YACjC,mBAAmB;YACnB,wBAAwB,CAAC,UAAU,CAAC,QAAQ,EAAE,OAAO,EAAE,SAAS,CAAC,EACjE,CAAC;YACD
,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,SAAS,EAAE,WAAW,EAAE;gBACnE,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE;gBAC7B,EAAE,GAAG,EAAE,gBAAgB,EAAE,KAAK,EAAE,YAAY,EAAE;gBAC9C,EAAE,GAAG,EAAE,gBAAgB,EAAE,KAAK,EAAE,YAAY,EAAE;gBAC9C,EAAE,GAAG,EAAE,YAAY,EAAE,KAAK,EAAE,UAAU,CAAC,QAAQ,EAAE;gBACjD,EAAE,GAAG,EAAE,kBAAkB,EAAE,KAAK,EAAE,cAAc,EAAE;gBAClD,EAAE,GAAG,EAAE,SAAS,EAAE,KAAK,EAAE,aAAa,EAAE;aACzC,CAAC,CAAC;YACH,OAAO,EAAE,YAAY,EAAE,KAAK,EAAE,aAAa,EAAE,iBAAiB,EAAE,IAAI,EAAE,CAAC;QACzE,CAAC;QAED,aAAa,GAAG,UAAU,CAAC,SAAS,CAAC;QAErC,IAAI,UAAU,CAAC,YAAY,IAAI,UAAU,CAAC,aAAa,IAAI,UAAU,CAAC,SAAS,EAAE,CAAC;YAChF,iBAAiB,GAAG,UAAU,CAAC,aAA6B,CAAC;YAC7D,aAAa,GAAG,UAAU,CAAC,SAAyB,CAAC;YACrD,YAAY,GAAG,IAAI,CAAC;QACtB,CAAC;IACH,CAAC;IAED,IAAI,CAAC,YAAY,IAAI,aAAa,IAAI,wBAAwB,EAAE,CAAC;QAC/D,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,SAAS,EAAE,iFAAiF,EAAE;YACzI,EAAE,GAAG,EAAE,SAAS,EAAE,KAAK,EAAE,aAAa,EAAE;YACxC,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE;SAC9B,CAAC,CAAC;QACH,OAAO,EAAE,YAAY,EAAE,KAAK,EAAE,aAAa,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC;IAClE,CAAC;IAED,OAAO,EAAE,YAAY,EAAE,iBAAiB,EAAE,aAAa,EAAE,aAAa,EAAE,CAAC;AAC3E,CAAC;AAED;;;GAGG;AACH,SAAS,iBAAiB,CACxB,iBAA+B,EAC/B,aAA2B,EAC3B,aAAqB,EACrB,gBAAuC,EACvC,kBAAgC,EAChC,mBAA2B,EAC3B,WAAmB,EACnB,SAAiB,EACjB,YAAoB,EACpB,MAAc;IAQd,MAAM,eAAe,GAAG,gBAAgB,CAAC,iBAAiB,EAAE,aAAa,CAAC,CAAC;IAC3E,MAAM,mBAAmB,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC;IACxD,MAAM,WAAW,GAAG,4BAA4B,CAAC,mBAAmB,CAAC,CAAC;IACtE,MAAM,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,EAAE,aAAa,CAAC,CAAC;IAC/E,MAAM,qBAAqB,GAAG,UAAU,CAAC,iBAAiB,CAAC,CAAC;IAE5D,IAAI,qBAAqB,IAAI,mBAAmB,IAAI,wBAAwB,CAAC,mBAAmB,EAAE,WAAW,EAAE,SAAS,CAAC,EAAE,CAAC;QAC1H,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,SAAS,EAAE,WAAW,EAAE;YACnE,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE;YACpC,EAAE,GAAG,EAAE,gBAAgB,EAAE,KAAK,EAAE,YAAY,EAAE;YAC9C,EAAE,GAAG,EAAE,gBAAgB,EAAE,KAAK,EAAE,mBAAmB,EAAE;YACrD,EAAE,GAAG,EAAE,kBAAkB,EAAE,KAAK,EAAE,qBAAqB,EAAE;YACzD,EAAE,GAAG,EAAE,SAAS,EAAE,KAAK,EAAE,aAAa,EAAE;SACzC,CAAC,CAAC;QACH,OAAO;YACL,SAAS,EAAE,IAAI;YACf,MAAM,EAAE,4BAA4B,C
AClC,iBAAiB,EACjB,aAAa,EACb,SAAS,EACT,IAAI,EACJ,WAAW,EACX,YAAY,EACZ,mBAAmB,EACnB,qBAAqB,EACrB,aAAa,CACd;YACD,QAAQ,EAAE,WAAW;YACrB,cAAc,EAAE,iBAAiB;YACjC,UAAU,EAAE,aAAa;SAC1B,CAAC;IACJ,CAAC;IAED,OAAO;QACL,SAAS,EAAE,KAAK;QAChB,QAAQ,EAAE,WAAW;QACrB,cAAc,EAAE,iBAAiB;QACjC,UAAU,EAAE,aAAa;KAC1B,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,6CAA6C,CACpD,iBAA+B,EAC/B,aAA2B,EAC3B,aAAqB,EACrB,gBAAuC,EACvC,kBAAgC,EAChC,wBAAkD,EAClD,WAAmB,EACnB,OAAe,EACf,WAAmB,EACnB,mBAA2B,EAC3B,SAAiB,EACjB,SAAiB,EACjB,YAAoB,EACpB,SAAiB,EACjB,MAAc,EACd,WAAiF,EACjF,IAAiE,EACjE,IAAiE;IAYjE,MAAM,EAAE,cAAc,EAAE,GAAG,wBAAwB,CACjD,iBAAiB,EACjB,aAAa,EACb,kBAAkB,EAClB,mBAAmB,EACnB,SAAS,EACT,MAAM,CACP,CAAC;IAEF,MAAM,QAAQ,GAAG,gBAAgB,CAAC,iBAAiB,EAAE,aAAa,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,UAAU,CAAC,QAAQ,CAAC,CAAC;IAC1C,MAAM,IAAI,GAAG,4BAA4B,CAAC,YAAY,CAAC,CAAC;IAExD,IAAI,WAAW,EAAE,CAAC;QAChB,WAAW,CAAC,SAAS,EAAE,IAAI,EAAE,iBAAiB,CAAC,CAAC;IAClD,CAAC;IAED,MAAM,iBAAiB,GAAG,wBAAwB,CAChD,iBAAiB,EACjB,aAAa,EACb,gBAAgB,EAChB,kBAAkB,EAClB,wBAAwB,EACxB,MAAM,EACN,+BAA+B,CAChC,CAAC;IAEF,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,8BAA8B,CAAC,iBAAiB,EAAE,QAAQ,CAAC,CAAC;IACjF,MAAM,cAAc,GAAG,oBAAoB,CAAC,GAAG,CAAC,CAAC;IACjD,MAAM,YAAY,GAAG,UAAU,CAAC,cAAc,CAAC,CAAC;IAChD,MAAM,mBAAmB,GAAG,cAAc,IAAI,mBAAmB,CAAC;IAElE,MAAM,yBAAyB,GAAG,qBAAqB,CACrD,YAAY,EACZ,mBAAmB,EACnB,WAAW,EACX,SAAS,EACT,iBAAiB,EACjB,aAAa,EACb,IAAI,EACJ,YAAY,EACZ,cAAc,EACd,aAAa,EACb,MAAM,CACP,CAAC;IAEF,IAAI,yBAAyB,EAAE,CAAC;QAC9B,OAAO;YACL,SAAS,EAAE,IAAI;YACf,MAAM,EAAE,yBAAyB;SAClC,CAAC;IACJ,CAAC;IAED,MAAM,UAAU,GAAG,iBAAiB,CAClC,GAAG,EACH,GAAG,EACH,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,YAAY,EACZ,gBAAgB,EAChB,kBAAkB,EAClB,IAAI,EACJ,OAAO,EACP,SAAS,EACT,SAAS,EACT,SAAS,EACT,mBAAmB,EACnB,mBAAmB,EACnB,YAAY,EACZ,YAAY,EACZ,cAAc,EACd,MAAM,EACN,IAAI,EACJ,IAAI,CACL,CAAC;IAEF,IAAI,UAAU,CAAC,UAAU,EAAE,CAAC;QAC1B,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC;IAChD,CAAC;IAED,IAAI,UAAU,CAAC,iBAAiB,EAAE,CAAC;QACjC,OAAO;YACL,SAAS,EAAE,IAAI;YACf,MAAM,EAAE,4BAA4B,CAClC,iBAAiB,EACjB,aAAa,EAC
b,SAAS,EACT,IAAI,EACJ,IAAI,EACJ,YAAY,EACZ,YAAY,EACZ,cAAc,EACd,UAAU,CAAC,aAAa,CACzB;SACF,CAAC;IACJ,CAAC;IAED,IAAI,UAAU,CAAC,YAAY,IAAI,UAAU,CAAC,iBAAiB,IAAI,UAAU,CAAC,aAAa,EAAE,CAAC;QACxF,MAAM,aAAa,GAAG,iBAAiB,CACrC,UAAU,CAAC,iBAAiB,EAC5B,UAAU,CAAC,aAAa,EACxB,UAAU,CAAC,aAAa,EACxB,gBAAgB,EAChB,kBAAkB,EAClB,mBAAmB,EACnB,WAAW,EACX,SAAS,EACT,YAAY,EACZ,MAAM,CACP,CAAC;QAEF,IAAI,aAAa,CAAC,SAAS,IAAI,aAAa,CAAC,MAAM,EAAE,CAAC;YACpD,OAAO;gBACL,SAAS,EAAE,IAAI;gBACf,MAAM,EAAE,aAAa,CAAC,MAAM;aAC7B,CAAC;QACJ,CAAC;QAED,OAAO;YACL,SAAS,EAAE,KAAK;YAChB,aAAa,EAAE,aAAa,CAAC,cAAc;YAC3C,SAAS,EAAE,aAAa,CAAC,UAAU;YACnC,SAAS,EAAE,UAAU,CAAC,aAAa;YACnC,QAAQ,EAAE,aAAa,CAAC,QAAQ;YAChC,cAAc,EAAE,aAAa,CAAC,cAAc;YAC5C,UAAU,EAAE,aAAa,CAAC,UAAU;SACrC,CAAC;IACJ,CAAC;IAED,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAE,UAAU,CAAC,aAAa,EAAE,CAAC;AACnE,CAAC;AAED;;;GAGG;AACH,SAAS,iBAAiB,CACxB,iBAA+B,EAC/B,aAA2B,EAC3B,gBAAuC;IAQvC,MAAM,iBAAiB,GAAiB,IAAI,YAAY,CAAC,iBAAiB,CAAC,CAAC;IAC5E,MAAM,aAAa,GAAiB,IAAI,YAAY,CAAC,aAAa,CAAC,CAAC;IACpE,MAAM,cAAc,GAAiB,IAAI,YAAY,CAAC,iBAAiB,CAAC,CAAC;IACzE,MAAM,UAAU,GAAiB,IAAI,YAAY,CAAC,aAAa,CAAC,CAAC;IACjE,MAAM,eAAe,GAAG,gBAAgB,CAAC,iBAAiB,EAAE,aAAa,CAAC,CAAC;IAC3E,MAAM,mBAAmB,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC;IACxD,MAAM,QAAQ,GAAG,4BAA4B,CAAC,mBAAmB,CAAC,CAAC;IACnE,OAAO,EAAE,iBAAiB,EAAE,aAAa,EAAE,cAAc,EAAE,UAAU,EAAE,QAAQ,EAAE,CAAC;AACpF,CAAC;AAED;;;GAGG;AACH,SAAS,eAAe,CACtB,iBAA+B,EAC/B,aAA2B,EAC3B,gBAAuC,EACvC,kBAAgC,EAChC,wBAAkD,EAClD,WAAmB,EACnB,OAAe,EACf,WAAmB,EACnB,mBAA2B,EAC3B,SAAiB,EACjB,SAAiB,EACjB,aAAqB,EACrB,YAAoB,EACpB,aAAqB,EACrB,MAAc,EACd,WAAiF,EACjF,IAAiE,EACjE,IAAiE;IAUjE,MAAM,KAAK,GAAG,iBAAiB,CAAC,iBAAiB,EAAE,aAAa,EAAE,gBAAgB,CAAC,CAAC;IACpF,IAAI,aAAa,GAAG,aAAa,CAAC;IAClC,IAAI,gBAAgB,GAAG,CAAC,CAAC;IAEzB,KAAK,IAAI,SAAS,GAAG,CAAC,EAAE,SAAS,GAAG,aAAa,EAAE,SAAS,EAAE,EAAE,CAAC;QAC/D,gBAAgB,GAAG,SAAS,GAAG,CAAC,CAAC;QACjC,MAAM,eAAe,GAAG,6CAA6C,CACnE,KAAK,CAAC,iBAAiB,EACvB,KAAK,CAAC,aAAa,EACnB,aAAa,EACb,gBAAgB,EAChB,kBAAkB,EAClB,wBAAwB,EACxB,WAAW,EACX,OAAO,EACP,WAAW,EACX,mBAAmB
,EACnB,SAAS,EACT,SAAS,EACT,YAAY,EACZ,SAAS,EACT,MAAM,EACN,WAAW,EACX,IAAI,EACJ,IAAI,CACL,CAAC;QAEF,IAAI,eAAe,CAAC,SAAS,IAAI,eAAe,CAAC,MAAM,EAAE,CAAC;YACxD,OAAO,EAAE,MAAM,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC;QAC5C,CAAC;QAED,IAAI,eAAe,CAAC,UAAU,EAAE,CAAC;YAC/B,MAAM;QACR,CAAC;QAED,IAAI,eAAe,CAAC,QAAQ,KAAK,SAAS,IAAI,eAAe,CAAC,QAAQ,GAAG,KAAK,CAAC,QAAQ,EAAE,CAAC;YACxF,KAAK,CAAC,QAAQ,GAAG,eAAe,CAAC,QAAQ,CAAC;YAC1C,IAAI,eAAe,CAAC,cAAc,IAAI,eAAe,CAAC,UAAU,EAAE,CAAC;gBACjE,KAAK,CAAC,cAAc,GAAG,IAAI,YAAY,CAAC,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxE,KAAK,CAAC,UAAU,GAAG,IAAI,YAAY,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC;YAClE,CAAC;QACH,CAAC;QAED,IAAI,eAAe,CAAC,SAAS,KAAK,SAAS,EAAE,CAAC;YAC5C,aAAa,GAAG,eAAe,CAAC,SAAS,CAAC;QAC5C,CAAC;QAED,IAAI,eAAe,CAAC,aAAa,IAAI,eAAe,CAAC,SAAS,EAAE,CAAC;YAC/D,KAAK,CAAC,iBAAiB,GAAG,eAAe,CAAC,aAA6B,CAAC;YACxE,KAAK,CAAC,aAAa,GAAG,eAAe,CAAC,SAAyB,CAAC;QAClE,CAAC;IACH,CAAC;IAED,OAAO;QACL,cAAc,EAAE,KAAK,CAAC,cAAc;QACpC,UAAU,EAAE,KAAK,CAAC,UAAU;QAC5B,QAAQ,EAAE,KAAK,CAAC,QAAQ;QACxB,aAAa;QACb,gBAAgB;KACjB,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,mBAAmB,CAC1B,cAA4B,EAC5B,UAAwB,EACxB,QAAgB,EAChB,aAAqB,EACrB,gBAAwB,EACxB,gBAAuC,EACvC,kBAAgC,EAChC,wBAAkD,EAClD,MAAc;IAEd,MAAM,aAAa,GAAG,gBAAgB,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;IACnE,MAAM,iBAAiB,GAAG,UAAU,CAAC,aAAa,CAAC,CAAC;IACpD,MAAM,eAAe,GAAG,kBAAkB,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;IACvE,MAAM,mBAAmB,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC;IAExD,MAAM,sBAAsB,GAAG,wBAAwB,CACrD,cAAc,EACd,UAAU,EACV,gBAAgB,EAChB,kBAAkB,EAClB,wBAAwB,EACxB,MAAM,EACN,+BAA+B,CAChC,CAAC;IACF,MAAM,EAAE,GAAG,EAAE,QAAQ,EAAE,GAAG,8BAA8B,CAAC,sBAAsB,EAAE,aAAa,CAAC,CAAC;IAChG,MAAM,iBAAiB,GAAG,UAAU,CAAC,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC;IAErE,OAAO,4BAA4B,CACjC,cAAc,EACd,UAAU,EACV,gBAAgB,GAAG,CAAC,EACpB,KAAK,EACL,QAAQ,EACR,iBAAiB,EACjB,iBAAiB,EACjB,mBAAmB,EACnB,aAAa,CACd,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;GAsBG;AACH,MAAM,UAAU,6BAA6B,CAC3C,iBAA+B,EAC/B,aAA2B,EAC3B,gBAAuC,EACvC,kBAAgC,EAChC,UAAgD,EAAE;IAElD,MAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,sBAAsB,CAAC;IACtE,MAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAA
I,sBAAsB,CAAC;IACtE,MAAM,YAAY,GAAG,OAAO,CAAC,YAAY,IAAI,qBAAqB,CAAC;IACnE,MAAM,WAAW,GAAG,OAAO,CAAC,WAAW,IAAI,oBAAoB,CAAC;IAChE,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,gBAAgB,CAAC;IACpD,MAAM,WAAW,GAAG,OAAO,CAAC,WAAW,IAAI,oBAAoB,CAAC;IAChE,MAAM,mBAAmB,GAAG,OAAO,CAAC,mBAAmB,IAAI,4BAA4B,CAAC;IACxF,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,IAAI,mBAAmB,CAAC;IAC3D,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,IAAI,mBAAmB,CAAC;IAC3D,MAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IACxC,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,QAAQ,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;IAE7D,yBAAyB,CACvB,iBAAiB,EACjB,aAAa,EACb,kBAAkB,EAClB,mBAAmB,EACnB,MAAM,EACN,+BAA+B,CAChC,CAAC;IAEF,MAAM,wBAAwB,GAA6B;QACzD,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,SAAS;QACT,SAAS;KACV,CAAC;IAEF,MAAM,eAAe,GAAG,eAAe,CACrC,iBAAiB,EACjB,aAAa,EACb,gBAAgB,EAChB,kBAAkB,EAClB,wBAAwB,EACxB,WAAW,EACX,OAAO,EACP,WAAW,EACX,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,aAAa,EACb,YAAY,EACZ,aAAa,EACb,MAAM,EACN,WAAW,EACX,OAAO,CAAC,IAAI,EACZ,OAAO,CAAC,IAAI,CACb,CAAC;IAEF,IAAI,QAAQ,IAAI,eAAe,EAAE,CAAC;QAChC,OAAO,eAAe,CAAC,MAAM,CAAC;IAChC,CAAC;IAED,MAAM,aAAa,GAAG,gBAAgB,CAAC,eAAe,CAAC,cAAc,EAAE,eAAe,CAAC,UAAU,CAAC,CAAC;IACnG,MAAM,iBAAiB,GAAG,UAAU,CAAC,aAAa,CAAC,CAAC;IACpD,MAAM,eAAe,GAAG,kBAAkB,CAAC,eAAe,CAAC,cAAc,EAAE,eAAe,CAAC,UAAU,CAAC,CAAC;IACvG,MAAM,mBAAmB,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC;IAExD,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,SAAS,EAAE,4BAA4B,EAAE;QACpF,EAAE,GAAG,EAAE,aAAa,EAAE,KAAK,EAAE,eAAe,CAAC,gBAAgB,EAAE;QAC/D,EAAE,GAAG,EAAE,aAAa,EAAE,KAAK,EAAE,eAAe,CAAC,QAAQ,EAAE;QACvD;YACE,GAAG,EAAE,sBAAsB,EAAE,KAAK,EAAE,UAAU,CAAC,oBAAoB,CAAC,8BAA8B,CAChG,wBAAwB,CACtB,eAAe,CAAC,cAAc,EAC9B,eAAe,CAAC,UAAU,EAC1B,gBAAgB,EAChB,kBAAkB,EAClB,wBAAwB,EACxB,MAAM,EACN,+BAA+B,CAChC,EACD,aAAa,CACd,CAAC,GAAG,CAAC,CAAC;SACR;QACD,EAAE,GAAG,EAAE,sBAAsB,EAAE,KAAK,EAAE,iBAAiB,EAAE;QACzD,EAAE,GAAG,EAAE,wBAAwB,EAAE,KAAK,EAAE,mBAAmB,EAAE;QAC7D,EAAE,GAAG,EAAE,eAAe,EAAE,KAAK,EAAE,eAAe,CAAC,aAAa,EAAE;KAC/D,CAAC,CAAC;IAEH,OAAO,mBAAmB,CACxB,eAAe,CAA
C,cAAc,EAC9B,eAAe,CAAC,UAAU,EAC1B,eAAe,CAAC,QAAQ,EACxB,eAAe,CAAC,aAAa,EAC7B,eAAe,CAAC,gBAAgB,EAChC,gBAAgB,EAChB,kBAAkB,EAClB,wBAAwB,EACxB,MAAM,CACP,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
/**
 * This file provides shared utility functions for constrained optimization algorithms
 * using the adjoint method.
 *
 * Role in system:
 * - Eliminates code duplication between adjointGradientDescent, constrainedGaussNewton, and constrainedLevenbergMarquardt
 * - Centralizes adjoint method computation logic (DRY principle)
 * - Provides reusable functions for state updates and constraint handling
 *
 * For first-time readers:
 * - These are utility functions used internally by constrained optimization algorithms
 * - solveAdjointEquation: Solves the adjoint equation for computing gradients
 * - updateStates: Updates states using linear approximation to maintain constraint satisfaction
 * - validateInitialConditions: Validates initial states and constraints
 *
 * Extracted from adjointGradientDescent.ts to enable code reuse.
 */
import { Matrix } from 'ml-matrix';
import type { ConstraintFn } from './types.js';
import { Logger } from './logger.js';
/**
 * Solves a least squares problem Ax = b using Cholesky decomposition.
 * For square matrices, uses Cholesky/LU decomposition directly.
 * For non-square matrices, uses normal equations to convert to square system:
 * - Overdetermined (rows > columns): A^T A x = A^T b (Cholesky on A^T A)
 * - Underdetermined (rows < columns): A A^T y = b, x = A^T y (Cholesky on A A^T)
 * This approach avoids SVD/pseudoInverse entirely for better performance.
 *
 * @param A - Coefficient matrix
 * @param b - Right-hand side vector (as Matrix column vector)
 * @param logger - Logger for error messages
 * @param algorithmName - Name of calling algorithm (for error messages)
 * @param regularization - Optional regularization amount (presumably added to
 *   the system's diagonal for numerical stability — confirm against the implementation)
 * @returns Solution vector x as Float64Array
 */
export declare function solveLeastSquares(A: Matrix, b: Matrix, logger: Logger, algorithmName?: string, regularization?: number): Float64Array;
/**
 * Solves the adjoint equation: (∂c/∂x)^T λ = rhs
 * Returns the adjoint variable λ.
 * Supports both square and non-square constraint Jacobians.
 *
 * This is the core of the adjoint method, used for efficient gradient computation
 * without explicitly inverting matrices.
 *
 * @param dcdx - Constraint Jacobian ∂c/∂x
 * @param rhs - Right-hand side vector (e.g., (∂f/∂x)^T or (r_x^T r))
 * @param logger - Logger instance for error reporting
 * @param algorithmName - Name of calling algorithm (for error messages)
 * @param regularization - Optional regularization amount passed to the linear
 *   solve (presumably a diagonal shift for ill-conditioned Jacobians — confirm
 *   against the implementation)
 * @returns Adjoint variable λ
 */
export declare function solveAdjointEquation(dcdx: Matrix, rhs: Float64Array, logger: Logger, algorithmName?: string, regularization?: number): Float64Array;
/**
 * Updates states using linear approximation: x_new = x_old + dx
 * where dx solves (∂c/∂x) dx = -∂c/∂p · Δp
 * Supports both square and non-square constraint Jacobians.
 *
 * This maintains constraint satisfaction approximately using first-order Taylor expansion.
 * For large steps, constraints may be violated slightly, but the algorithm will correct
 * this in subsequent iterations.
 *
 * @param currentStates - Current state vector x
 * @param dcdx - Constraint Jacobian ∂c/∂x
 * @param dcdp - Constraint Jacobian ∂c/∂p
 * @param deltaP - Parameter change Δp
 * @param logger - Logger instance for error reporting
 * @param algorithmName - Name of calling algorithm (for error messages)
 * @returns Updated state vector x_new
 */
export declare function updateStates(currentStates: Float64Array, dcdx: Matrix, dcdp: Matrix, deltaP: Float64Array, logger: Logger, algorithmName?: string): Float64Array;
/**
 * Projects states onto the constraint manifold for fixed parameters using
 * a few Newton correction steps: (∂c/∂x) Δx = -c(p, x).
 * This is a standard feasibility-restoration step consistent with the
 * implicit function theorem (solving c(p, x) = 0 locally).
 *
 * @param parameters - Parameter vector p (held fixed during the projection)
 * @param states - State vector x to be corrected toward the manifold
 * @param constraintFunction - Constraint function c(p, x) = 0
 * @param stepSizeX - Step size for the states (assumed to be the
 *   finite-difference step used to form ∂c/∂x — TODO confirm in implementation)
 * @param constraintTolerance - Tolerance on the constraint violation; the
 *   Newton corrections presumably stop once ‖c(p, x)‖ falls below it — verify
 * @param logger - Logger instance for error reporting
 * @param algorithmName - Name of calling algorithm (for error messages)
 * @param maxIterations - Maximum number of Newton correction steps
 * @returns Corrected state vector approximately satisfying c(p, x) = 0
 */
export declare function projectStatesToConstraints(parameters: Float64Array, states: Float64Array, constraintFunction: ConstraintFn, stepSizeX: number, constraintTolerance: number, logger: Logger, algorithmName?: string, maxIterations?: number): Float64Array;
/**
 * Validates initial conditions including constraint satisfaction and dimensions.
 *
 * Checks that:
 * 1. Constraint count equals state count (required for adjoint method)
 * 2. Initial constraint violation is within tolerance (warns if not)
 *
 * @param initialParameters - Initial parameter vector p0
 * @param initialStates - Initial state vector x0
 * @param constraintFunction - Constraint function c(p, x) = 0
 * @param constraintTolerance - Tolerance for constraint violation
 * @param logger - Logger instance for warnings
 * @param algorithmName - Name of calling algorithm (for error messages)
 * @throws Error if constraint count != state count
 */
export declare function validateInitialConditions(initialParameters: Float64Array, initialStates: Float64Array, constraintFunction: ConstraintFn, constraintTolerance: number, logger: Logger, algorithmName?: string): void;
//# sourceMappingURL=constrainedUtils.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"constrainedUtils.d.ts","sourceRoot":"","sources":["../../src/core/constrainedUtils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;GAgBG;AAEH,OAAO,EAAE,MAAM,EAAgC,MAAM,WAAW,CAAC;AACjE,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AAG/C,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AAyQrC;;;;;;;;;;;;;GAaG;AACH,wBAAgB,iBAAiB,CAC/B,CAAC,EAAE,MAAM,EACT,CAAC,EAAE,MAAM,EACT,MAAM,EAAE,MAAM,EACd,aAAa,GAAE,MAAkC,EACjD,cAAc,GAAE,MAAU,GACzB,YAAY,CAgBd;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,oBAAoB,CAClC,IAAI,EAAE,MAAM,EACZ,GAAG,EAAE,YAAY,EACjB,MAAM,EAAE,MAAM,EACd,aAAa,GAAE,MAAkC,EACjD,cAAc,GAAE,MAAU,GACzB,YAAY,CAkBd;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,YAAY,CAC1B,aAAa,EAAE,YAAY,EAC3B,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,MAAM,EACZ,MAAM,EAAE,YAAY,EACpB,MAAM,EAAE,MAAM,EACd,aAAa,GAAE,MAAkC,GAChD,YAAY,CAcd;AAED;;;;;GAKG;AACH,wBAAgB,0BAA0B,CACxC,UAAU,EAAE,YAAY,EACxB,MAAM,EAAE,YAAY,EACpB,kBAAkB,EAAE,YAAY,EAChC,SAAS,EAAE,MAAM,EACjB,mBAAmB,EAAE,MAAM,EAC3B,MAAM,EAAE,MAAM,EACd,aAAa,GAAE,MAAkC,EACjD,aAAa,GAAE,MAAU,GACxB,YAAY,CAyBd;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,yBAAyB,CACvC,iBAAiB,EAAE,YAAY,EAC/B,aAAa,EAAE,YAAY,EAC3B,kBAAkB,EAAE,YAAY,EAChC,mBAAmB,EAAE,MAAM,EAC3B,MAAM,EAAE,MAAM,EACd,aAAa,GAAE,MAAkC,GAChD,IAAI,CAYN"}
|