@datagrok/eda 1.4.12 → 1.4.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -282,10 +282,14 @@ export namespace funcs {
282
282
  return await grok.functions.call('EDA:TrainPmpo', {});
283
283
  }
284
284
 
285
+ export async function getPmpoAppItems(view: DG.View ): Promise<any> {
286
+ return await grok.functions.call('EDA:GetPmpoAppItems', { view });
287
+ }
288
+
285
289
  /**
286
- Apply trained probabilistic multi-parameter optimization (pMPO) model to score samples
290
+ Generates synthetic dataset oriented toward pMPO modeling
287
291
  */
288
- export async function applyPmpo(table: DG.DataFrame , file: DG.FileInfo ): Promise<void> {
289
- return await grok.functions.call('EDA:ApplyPmpo', { table, file });
292
+ export async function generatePmpoDataset(samples: number ): Promise<DG.DataFrame> {
293
+ return await grok.functions.call('EDA:GeneratePmpoDataset', { samples });
290
294
  }
291
295
  }
@@ -1,10 +1,13 @@
1
1
  import * as DG from 'datagrok-api/dg';
2
- import {runTests, tests, TestContext, initAutoTests as initTests} from '@datagrok-libraries/utils/src/test';
2
+ import {runTests, tests, TestContext, initAutoTests as initTests} from '@datagrok-libraries/test/src/test';
3
3
  import './tests/dim-reduction-tests';
4
4
  import './tests/linear-methods-tests';
5
5
  import './tests/classifiers-tests';
6
6
  import './tests/mis-vals-imputation-tests';
7
7
  import './tests/anova-tests';
8
+ import './tests/pmpo-tests';
9
+ import './tests/pareto-tests';
10
+
8
11
  export const _package = new DG.Package();
9
12
  export {tests};
10
13
 
package/src/package.g.ts CHANGED
@@ -7,6 +7,7 @@ export function info() : void {
7
7
  }
8
8
 
9
9
  //tags: init
10
+ //meta.role: init
10
11
  export async function init() : Promise<void> {
11
12
  await PackageFunctions.init();
12
13
  }
@@ -37,34 +38,37 @@ export async function PCA(table: DG.DataFrame, features: DG.ColumnList, componen
37
38
  }
38
39
 
39
40
  //name: DBSCAN clustering
41
+ //tags: dim-red-postprocessing-function
40
42
  //input: column col1
41
43
  //input: column col2
42
44
  //input: double epsilon = 0.01 { description: Minimum distance between two points to be considered as in the same neighborhood. }
43
45
  //input: int minimumPoints = 5 { description: Minimum number of points to form a dense region. }
44
46
  //meta.defaultPostProcessingFunction: true
45
- //meta.role: dimRedPostprocessingFunction
47
+ //meta.role: dim-red-postprocessing-function
46
48
  export async function dbscanPostProcessingFunction(col1: DG.Column, col2: DG.Column, epsilon: number, minimumPoints: number) : Promise<void> {
47
49
  await PackageFunctions.dbscanPostProcessingFunction(col1, col2, epsilon, minimumPoints);
48
50
  }
49
51
 
50
52
  //name: None (number)
53
+ //tags: dim-red-preprocessing-function
51
54
  //input: column col
52
55
  //input: string _metric { optional: true }
53
56
  //output: object result
54
57
  //meta.supportedTypes: int,float,double,qnum
55
58
  //meta.supportedDistanceFunctions: Difference
56
- //meta.role: dimRedPreprocessingFunction
59
+ //meta.role: dim-red-preprocessing-function
57
60
  export function numberPreprocessingFunction(col: DG.Column, _metric: string) {
58
61
  return PackageFunctions.numberPreprocessingFunction(col, _metric);
59
62
  }
60
63
 
61
64
  //name: None (string)
65
+ //tags: dim-red-preprocessing-function
62
66
  //input: column col
63
67
  //input: string _metric { optional: true }
64
68
  //output: object result
65
69
  //meta.supportedTypes: string
66
70
  //meta.supportedDistanceFunctions: One-Hot,Levenshtein,Hamming
67
- //meta.role: dimRedPreprocessingFunction
71
+ //meta.role: dim-red-preprocessing-function
68
72
  export function stringPreprocessingFunction(col: DG.Column, _metric: string) {
69
73
  return PackageFunctions.stringPreprocessingFunction(col, _metric);
70
74
  }
@@ -77,6 +81,7 @@ export async function reduceDimensionality() : Promise<void> {
77
81
 
78
82
  //tags: editor
79
83
  //input: funccall call
84
+ //meta.role: editor
80
85
  export function GetMCLEditor(call: DG.FuncCall) : void {
81
86
  PackageFunctions.GetMCLEditor(call);
82
87
  }
@@ -102,6 +107,7 @@ export async function MCLClustering(df: DG.DataFrame, cols: DG.Column[], metrics
102
107
 
103
108
  //name: MCL
104
109
  //description: Markov clustering viewer
110
+ //tags: viewer
105
111
  //output: viewer result
106
112
  //meta.showInGallery: false
107
113
  //meta.role: viewer
@@ -532,6 +538,7 @@ export function paretoFront() : void {
532
538
 
533
539
  //name: Pareto front
534
540
  //description: Pareto front viewer
541
+ //tags: viewer
535
542
  //output: viewer result
536
543
  //meta.icon: icons/pareto-front-viewer.svg
537
544
  //meta.role: viewer
@@ -540,14 +547,19 @@ export function paretoFrontViewer() : any {
540
547
  }
541
548
 
542
549
  //description: Train probabilistic multi-parameter optimization (pMPO) model
543
- //top-menu: Chem | Calculate | Train pMPO...
544
550
  export function trainPmpo() : void {
545
551
  PackageFunctions.trainPmpo();
546
552
  }
547
553
 
548
- //description: Apply trained probabilistic multi-parameter optimization (pMPO) model to score samples
549
- //input: dataframe table
550
- //input: file file
551
- export async function applyPmpo(table: DG.DataFrame, file: DG.FileInfo) : Promise<void> {
552
- await PackageFunctions.applyPmpo(table, file);
554
+ //input: view view
555
+ //output: object result
556
+ export function getPmpoAppItems(view: any) : any {
557
+ return PackageFunctions.getPmpoAppItems(view);
558
+ }
559
+
560
+ //description: Generates synthetic dataset oriented toward pMPO modeling
561
+ //input: int samples
562
+ //output: dataframe Synthetic
563
+ export async function generatePmpoDataset(samples: number) : Promise<any> {
564
+ return await PackageFunctions.generatePmpoDataset(samples);
553
565
  }
package/src/package.ts CHANGED
@@ -36,10 +36,12 @@ import {SoftmaxClassifier} from './softmax-classifier';
36
36
 
37
37
  import {initXgboost} from '../wasm/xgbooster';
38
38
  import {XGBooster} from './xgbooster';
39
+
39
40
  import {ParetoOptimizer} from './pareto-optimization/pareto-optimizer';
40
41
  import {ParetoFrontViewer} from './pareto-optimization/pareto-front-viewer';
42
+
41
43
  import {Pmpo} from './probabilistic-scoring/prob-scoring';
42
- import {loadPmpoParams} from './probabilistic-scoring/pmpo-utils';
44
+ import {getSynteticPmpoData} from './probabilistic-scoring/data-generator';
43
45
 
44
46
  export const _package = new DG.Package();
45
47
  export * from './package.g';
@@ -53,7 +55,7 @@ export class PackageFunctions {
53
55
  }
54
56
 
55
57
 
56
- @grok.decorators.init({})
58
+ @grok.decorators.init({tags: ['init']})
57
59
  static async init(): Promise<void> {
58
60
  await _initEDAAPI();
59
61
  await initXgboost();
@@ -115,8 +117,9 @@ export class PackageFunctions {
115
117
 
116
118
 
117
119
  @grok.decorators.func({
118
- 'meta': {'defaultPostProcessingFunction': 'true', role: 'dimRedPostprocessingFunction'},
120
+ 'meta': {'defaultPostProcessingFunction': 'true', 'role': 'dim-red-postprocessing-function'},
119
121
  'name': 'DBSCAN clustering',
122
+ 'tags': ['dim-red-postprocessing-function'],
120
123
  })
121
124
  static async dbscanPostProcessingFunction(
122
125
  col1: DG.Column,
@@ -145,9 +148,10 @@ export class PackageFunctions {
145
148
  'meta': {
146
149
  'supportedTypes': 'int,float,double,qnum',
147
150
  'supportedDistanceFunctions': 'Difference',
148
- 'role': 'dimRedPreprocessingFunction'
151
+ 'role': 'dim-red-preprocessing-function',
149
152
  },
150
153
  'name': 'None (number)',
154
+ 'tags': ['dim-red-preprocessing-function'],
151
155
  'outputs': [{name: 'result', type: 'object'}],
152
156
  })
153
157
  static numberPreprocessingFunction(
@@ -163,8 +167,9 @@ export class PackageFunctions {
163
167
  'meta': {
164
168
  'supportedTypes': 'string',
165
169
  'supportedDistanceFunctions': 'One-Hot,Levenshtein,Hamming',
166
- 'role': 'dimRedPreprocessingFunction'
170
+ 'role': 'dim-red-preprocessing-function',
167
171
  },
172
+ 'tags': ['dim-red-preprocessing-function'],
168
173
  'name': 'None (string)',
169
174
  'outputs': [{name: 'result', type: 'object'}],
170
175
  })
@@ -219,7 +224,7 @@ export class PackageFunctions {
219
224
  }
220
225
 
221
226
 
222
- @grok.decorators.editor()
227
+ @grok.decorators.editor({tags: ['editor']})
223
228
  static GetMCLEditor(
224
229
  call: DG.FuncCall): void {
225
230
  try {
@@ -287,6 +292,7 @@ export class PackageFunctions {
287
292
  @grok.decorators.func({
288
293
  'outputs': [{'name': 'result', 'type': 'viewer'}],
289
294
  'meta': {showInGallery: 'false', role: 'viewer'},
295
+ 'tags': ['viewer'],
290
296
  'name': 'MCL',
291
297
  'description': 'Markov clustering viewer',
292
298
  })
@@ -979,14 +985,14 @@ export class PackageFunctions {
979
985
  'name': 'Pareto front',
980
986
  'description': 'Pareto front viewer',
981
987
  'outputs': [{'name': 'result', 'type': 'viewer'}],
982
- 'meta': {'icon': 'icons/pareto-front-viewer.svg', role: 'viewer'},
988
+ 'meta': {'icon': 'icons/pareto-front-viewer.svg', 'role': 'viewer'},
989
+ 'tags': ['viewer'],
983
990
  })
984
991
  static paretoFrontViewer(): DG.Viewer {
985
992
  return new ParetoFrontViewer();
986
993
  }
987
994
 
988
995
  @grok.decorators.func({
989
- 'top-menu': 'Chem | Calculate | Train pMPO...',
990
996
  'name': 'trainPmpo',
991
997
  'description': 'Train probabilistic multi-parameter optimization (pMPO) model',
992
998
  })
@@ -1004,22 +1010,25 @@ export class PackageFunctions {
1004
1010
  pMPO.runTrainingApp();
1005
1011
  }
1006
1012
 
1013
+ @grok.decorators.func({'name': 'getPmpoAppItems', 'outputs': [{name: 'result', type: 'object'}]})
1014
+ static getPmpoAppItems(@grok.decorators.param({type: 'view'}) view: DG.TableView): any | null {
1015
+ const df = view.dataFrame;
1016
+ if (!Pmpo.isTableValid(df))
1017
+ return null;
1018
+
1019
+ const pMPO = new Pmpo(df, view);
1020
+
1021
+ return pMPO.getPmpoAppItems();
1022
+ }
1023
+
1007
1024
  @grok.decorators.func({
1008
- //'top-menu': 'ML | Apply pMPO...',
1009
- 'name': 'applyPmpo',
1010
- 'description': 'Apply trained probabilistic multi-parameter optimization (pMPO) model to score samples',
1025
+ 'name': 'generatePmpoDataset',
1026
+ 'description': 'Generates synthetic dataset oriented toward pMPO modeling',
1027
+ 'outputs': [{name: 'Synthetic', type: 'dataframe'}],
1011
1028
  })
1012
- static async applyPmpo(
1013
- @grok.decorators.param({'type': 'dataframe'}) table: DG.DataFrame,
1014
- @grok.decorators.param({'type': 'file'}) file: DG.FileInfo,
1015
- ): Promise<void> {
1016
- try {
1017
- const params = await loadPmpoParams(file);
1018
- const predName = table.columns.getUnusedName('pMPO score');
1019
- const prediction = Pmpo.predict(table, params, predName);
1020
- table.columns.add(prediction, true);
1021
- } catch (err) {
1022
- grok.shell.warning(`Failed to apply pMPO: ${err instanceof Error ? err.message : 'the platform issue.'}`);
1023
- }
1029
+ static async generatePmpoDataset(@grok.decorators.param({'type': 'int'}) samples: number): Promise<DG.DataFrame> {
1030
+ const df = await getSynteticPmpoData(samples);
1031
+ df.name = 'Synthetic';
1032
+ return df;
1024
1033
  }
1025
1034
  }
@@ -2,6 +2,12 @@
2
2
 
3
3
  import {NumericArray, OPT_TYPE} from './defs';
4
4
 
5
+ /** Computes the Pareto front mask for a given dataset and optimization sense
6
+ * @param rawData Array of numeric arrays representing the dataset (each array corresponds to a feature/dimension)
7
+ * @param sense Array of optimization types (OPT_TYPE.MIN or OPT_TYPE.MAX) for each dimension
8
+ * @param nPoints Number of data points in the dataset
9
+ * @param nullIndices Optional set of indices corresponding to missing values (these points will be marked as non-optimal)
10
+ * @returns Boolean array where true indicates that the point is on the Pareto front */
5
11
  export function getParetoMask(rawData: NumericArray[], sense: OPT_TYPE[], nPoints: number,
6
12
  nullIndices?: Set<number>): boolean[] {
7
13
  if (nPoints === 0)
@@ -0,0 +1,157 @@
1
+ import * as grok from 'datagrok-api/grok';
2
+ import * as ui from 'datagrok-api/ui';
3
+ import * as DG from 'datagrok-api/dg';
4
+
5
+ import {DescriptorStatistics, SOURCE_PATH, SYNTHETIC_DRUG_NAME} from './pmpo-defs';
6
+ import {getDescriptorStatistics, getDesiredTables} from './stat-tools';
7
+
8
+ //@ts-ignore: no types
9
+ import * as jStat from 'jstat';
10
+
11
+ /** Generates synthetic data for pMPO model training and testing
12
+ * @param samplesCount Number of samples to generate
13
+ * @returns DataFrame with generated data */
14
+ export async function getSynteticPmpoData(samplesCount: number): Promise<DG.DataFrame> {
15
+ const df = await grok.dapi.files.readCsv(SOURCE_PATH);
16
+ const generator = new PmpoDataGenerator(df, 'Drug', 'CNS', 'Smiles');
17
+
18
+ return generator.getGenerated(samplesCount);
19
+ }
20
+
21
+ /** Class for generating synthetic data for pMPO model training and testing */
22
+ export class PmpoDataGenerator {
23
+ private sourceDf: DG.DataFrame;
24
+ private drugName: string;
25
+ private desirabilityColName: string;
26
+ private smilesColName: string;
27
+ private desiredProbability: number;
28
+ private descriptorStats: Map<string, DescriptorStatistics>;
29
+
30
+ constructor(df: DG.DataFrame, drugName: string, desirabilityColName: string, smilesColName: string) {
31
+ this.sourceDf = df;
32
+ this.drugName = drugName;
33
+ this.desirabilityColName = desirabilityColName;
34
+ this.smilesColName = smilesColName;
35
+
36
+ const descriptorNames = df.columns.toList().filter((col) => col.isNumerical).map((col) => col.name);
37
+ const {desired, nonDesired} = getDesiredTables(df, df.col(desirabilityColName)!);
38
+
39
+ // Compute descriptors' statistics
40
+ this.descriptorStats = new Map<string, DescriptorStatistics>();
41
+ descriptorNames.forEach((name) => {
42
+ this.descriptorStats.set(name, getDescriptorStatistics(desired.col(name)!, nonDesired.col(name)!));
43
+ });
44
+
45
+ // Probability of desired class
46
+ this.desiredProbability = desired.rowCount / df.rowCount;
47
+ } // constructor
48
+
49
+ /** Generates synthetic data for pMPO model training and testing
50
+ * @param samplesCount Number of samples to generate
51
+ * @returns DataFrame with generated data */
52
+ public getGenerated(samplesCount: number): DG.DataFrame {
53
+ if (samplesCount <= 1)
54
+ throw new Error('Failed to generate pMPO data: sample count must be greater than 1.');
55
+
56
+ let result: DG.DataFrame;
57
+
58
+ /* Use rows from the source dataframe if the requested sample count
59
+ is less than or equal to the source dataframe row count */
60
+ if (samplesCount <= this.sourceDf.rowCount) {
61
+ const rowMask = DG.BitSet.create(this.sourceDf.rowCount);
62
+
63
+ for (let i = 0; i < samplesCount; ++i)
64
+ rowMask.set(i, true);
65
+
66
+ result = this.sourceDf.clone(rowMask);
67
+ } else {
68
+ const cloneDf = this.getClonedSourceDfWithFloatNumericCols();
69
+ result = cloneDf.append(this.getSyntheticTable(samplesCount - this.sourceDf.rowCount));
70
+ }
71
+
72
+ // Check boolean columns and ensure non-zero stdev
73
+ for (const col of result.columns) {
74
+ if (col.type === DG.COLUMN_TYPE.BOOL && col.stats.stdev === 0) {
75
+ // All values are the same, flip the first value
76
+ let value = col.get(0);
77
+ col.set(0, !value);
78
+
79
+ value = col.get(1);
80
+ col.set(1, !value);
81
+ }
82
+ }
83
+
84
+ return result;
85
+ } // getGenerated
86
+
87
+ /** Generates a synthetic data table
88
+ * @param samplesCount Number of samples to generate
89
+ * @returns DataFrame with synthetic data */
90
+ private getSyntheticTable(samplesCount: number): DG.DataFrame {
91
+ const desirabilityRaw = new Array<boolean>(samplesCount);
92
+
93
+ for (let i = 0; i < samplesCount; ++i)
94
+ desirabilityRaw[i] = (Math.random() < this.desiredProbability);
95
+
96
+
97
+ const cols = [
98
+ this.getDrugColumn(samplesCount),
99
+ this.getSmilesColumn(samplesCount),
100
+ DG.Column.fromList(DG.COLUMN_TYPE.BOOL, this.desirabilityColName, desirabilityRaw),
101
+ ];
102
+
103
+ this.descriptorStats.forEach((stat, name) => {
104
+ const arr = new Float32Array(samplesCount);
105
+
106
+ for (let i = 0; i < samplesCount; ++i) {
107
+ if (desirabilityRaw[i])
108
+ arr[i] = jStat.normal.sample(stat.desAvg, stat.desStd);
109
+ else
110
+ arr[i] = jStat.normal.sample(stat.nonDesAvg, stat.nonDesStd);
111
+ }
112
+
113
+ // @ts-ignore
114
+ cols.push(DG.Column.fromFloat32Array(name, arr));
115
+ });
116
+
117
+ return DG.DataFrame.fromColumns(cols);
118
+ } // getSyntheticTable
119
+
120
+ /** Generates a column with synthetic drug names
121
+ * @param samplesCount Number of samples to generate
122
+ * @returns Column with synthetic drug names */
123
+ private getDrugColumn(samplesCount: number): DG.Column<string> {
124
+ return DG.Column.fromList(
125
+ DG.COLUMN_TYPE.STRING,
126
+ this.drugName,
127
+ Array.from({length: samplesCount}, (_, i) => `${SYNTHETIC_DRUG_NAME} ${i + 1}`));
128
+ }
129
+
130
+ /** Generates a column with synthetic SMILES strings
131
+ * @param samplesCount Number of samples to generate
132
+ * @returns Column with synthetic SMILES strings */
133
+ private getSmilesColumn(samplesCount: number): DG.Column<string> {
134
+ return DG.Column.fromList(
135
+ DG.COLUMN_TYPE.STRING,
136
+ this.smilesColName,
137
+ Array.from({length: samplesCount}, () => 'C'));
138
+ }
139
+
140
+ /** Clones the source dataframe converting numerical columns to Float type
141
+ * @returns Cloned dataframe */
142
+ private getClonedSourceDfWithFloatNumericCols(): DG.DataFrame {
143
+ const cols: DG.Column[] = [];
144
+
145
+ this.sourceDf.columns.toList().forEach((col) => {
146
+ if (col.isNumerical)
147
+ cols.push(col.clone().convertTo(DG.COLUMN_TYPE.FLOAT));
148
+ else
149
+ cols.push(col.clone());
150
+ });
151
+
152
+ const clone = DG.DataFrame.fromColumns(cols);
153
+ clone.name = this.sourceDf.name;
154
+
155
+ return clone;
156
+ }
157
+ } // PmpoDataGenerator
@@ -0,0 +1,204 @@
1
+ import * as grok from 'datagrok-api/grok';
2
+ import * as ui from 'datagrok-api/ui';
3
+ import * as DG from 'datagrok-api/dg';
4
+
5
+ function getInitialParams(
6
+ objectiveFunc: (x: Float32Array) => number,
7
+ settings: Map<string, number>,
8
+ paramsInitial: Float32Array,
9
+ restrictionsBottom: Float32Array,
10
+ restrictionsTop: Float32Array): [Float32Array[], number[]] {
11
+ const dim = paramsInitial.length + 1;
12
+ const dimParams = paramsInitial.length;
13
+ const nonZeroParam = settings.get('nonZeroParam')!;
14
+ const initScale = settings.get('initialScale')!;
15
+
16
+ const optParams = new Array<Float32Array>(dim);
17
+ const pointObjectives = new Array<number>(dim);
18
+
19
+ for (let i = 0; i < dim; i++) {
20
+ optParams[i] = new Float32Array(dimParams);
21
+ for (let j = 0; j < dimParams; j++) {
22
+ optParams[i][j] = paramsInitial[j];
23
+ if (i != 0 && i - 1 === j) {
24
+ if (paramsInitial[j] == 0)
25
+ optParams[i][j] = nonZeroParam;
26
+ else
27
+ optParams[i][j] += initScale * paramsInitial[i - 1];
28
+
29
+ if (optParams[i][j] < restrictionsBottom[j])
30
+ optParams[i][j] = restrictionsBottom[j];
31
+ else if (optParams[i][j] > restrictionsTop[j])
32
+ optParams[i][j] = restrictionsTop[j];
33
+ }
34
+ }
35
+
36
+ pointObjectives[i] = objectiveFunc(optParams[i]);
37
+ }
38
+
39
+ return [optParams, pointObjectives];
40
+ } // getInitialParams
41
+
42
+ function fillCentroid(centroid: Float32Array, dimParams: number, lastIndex: number, optParams: Float32Array[]) {
43
+ for (let i = 0; i < dimParams; i++) {
44
+ let val = 0;
45
+ for (let j = 0; j < dimParams + 1; j++) {
46
+ if (j != lastIndex)
47
+ val += optParams[j][i];
48
+ }
49
+
50
+ centroid[i] = val / dimParams;
51
+ }
52
+ } // fillCentroid
53
+
54
+ function fillPoint(
55
+ centroid: Float32Array, point: Float32Array,
56
+ lastIndex: number, optParams: Float32Array[],
57
+ scale: number, dimParams: number,
58
+ restrictionsBottom: Float32Array,
59
+ restrictionsTop: Float32Array) {
60
+ for (let i = 0; i < dimParams; i++) {
61
+ point[i] = centroid[i];
62
+ point[i] += scale * (centroid[i] - optParams[lastIndex][i]);
63
+
64
+ if (point[i] < restrictionsBottom[i])
65
+ point[i] = restrictionsBottom[i];
66
+ else if (point[i] > restrictionsTop[i])
67
+ point[i] = restrictionsTop[i];
68
+ }
69
+ } // fillPoint
70
+
71
+ export async function optimizeNM(pi: DG.ProgressIndicator,
72
+ objectiveFunc: (x: Float32Array) => number, paramsInitial: Float32Array,
73
+ settings: Map<string, number>, restrictionsBottom: Float32Array, restrictionsTop: Float32Array) {
74
+ // Settings initialization
75
+ const tolerance = settings.get('tolerance')!;
76
+ const maxIter = settings.get('maxIter')!;
77
+ const scaleReflection = settings.get('scaleReflaction')!;
78
+ const scaleExpansion = settings.get('scaleExpansion')!;
79
+ const scaleContraction = settings.get('scaleContraction')!;
80
+
81
+ const dim = paramsInitial.length + 1;
82
+ const dimParams = paramsInitial.length;
83
+
84
+ const [optParams, pointObjectives] = getInitialParams(
85
+ objectiveFunc,
86
+ settings,
87
+ paramsInitial,
88
+ restrictionsBottom,
89
+ restrictionsTop,
90
+ );
91
+
92
+ const indexes = new Array<number>(dim);
93
+ for (let i = 0; i < dim; i++)
94
+ indexes[i] = i;
95
+
96
+ const lastIndex = indexes.length - 1;
97
+
98
+ let iteration = 0;
99
+ let best = 0;
100
+ let previousBest = 0;
101
+ let noImprovement = 0;
102
+
103
+ const centroid = new Float32Array(dimParams);
104
+ const reflectionPoint = new Float32Array(dimParams);
105
+ const expansionPoint = new Float32Array(dimParams);
106
+ const contractionPoint = new Float32Array(dimParams);
107
+ const costs = new Array<number>(maxIter);
108
+
109
+ if (dim > 1) {
110
+ let percentage = 0;
111
+
112
+ while (true) {
113
+ indexes.sort((a:number, b:number) => {
114
+ return pointObjectives[a] - pointObjectives[b];
115
+ });
116
+
117
+ percentage = Math.min(100, Math.floor(100 * (iteration) / maxIter));
118
+ pi.update(percentage, `Optimizing pMPO... (${percentage}%)`);
119
+ await new Promise((r) => setTimeout(r, 1));
120
+
121
+ if (pi.canceled)
122
+ break;
123
+
124
+ if (iteration > maxIter)
125
+ break;
126
+
127
+ if (iteration == 0) {
128
+ best = pointObjectives[0];
129
+ previousBest = 2*pointObjectives[indexes[0]];
130
+ }
131
+ costs[iteration] = best;
132
+
133
+ ++iteration;
134
+
135
+ best = pointObjectives[indexes[0]];
136
+ if (previousBest - best > tolerance)
137
+ noImprovement = 0;
138
+ else {
139
+ ++noImprovement;
140
+ if (noImprovement > 2 * dim)
141
+ break;
142
+ }
143
+
144
+ previousBest = best;
145
+
146
+ //centroid
147
+ fillCentroid(centroid, dimParams, indexes[lastIndex], optParams);
148
+
149
+ // reflection
150
+ fillPoint(centroid, reflectionPoint, indexes[lastIndex],
151
+ optParams, scaleReflection, dimParams, restrictionsBottom, restrictionsTop);
152
+ const reflectionScore = objectiveFunc(reflectionPoint);
153
+
154
+ // expansion
155
+ if (reflectionScore < pointObjectives[indexes[lastIndex]]) {
156
+ fillPoint(centroid, expansionPoint, indexes[lastIndex],
157
+ optParams, scaleExpansion, dimParams, restrictionsBottom, restrictionsTop);
158
+
159
+ const expansionScore = objectiveFunc(expansionPoint);
160
+
161
+ if (expansionScore < reflectionScore) {
162
+ pointObjectives[indexes[lastIndex]] = expansionScore;
163
+
164
+ for (let i = 0; i < dimParams; i++)
165
+ optParams[indexes[lastIndex]][i] = expansionPoint[i];
166
+
167
+ continue;
168
+ } else {
169
+ pointObjectives[indexes[lastIndex]] = reflectionScore;
170
+
171
+ for (let i = 0; i < dimParams; i++)
172
+ optParams[indexes[lastIndex]][i] = reflectionPoint[i];
173
+
174
+ continue;
175
+ }
176
+ }
177
+
178
+ // Contraction
179
+ fillPoint(centroid, contractionPoint, indexes[lastIndex],
180
+ optParams, scaleContraction, dimParams, restrictionsBottom, restrictionsTop);
181
+
182
+ const contractionScore = objectiveFunc(contractionPoint);
183
+
184
+ if (contractionScore < pointObjectives[indexes[lastIndex]]) {
185
+ pointObjectives[indexes[lastIndex]] = contractionScore;
186
+
187
+ for (let i = 0; i < dimParams; i++)
188
+ optParams[indexes[lastIndex]][i] = contractionPoint[i];
189
+
190
+ continue;
191
+ }
192
+
193
+ break;
194
+ } // while
195
+
196
+ for (let i = iteration; i < maxIter; i++)
197
+ costs[i] = pointObjectives[indexes[0]];
198
+ } // if
199
+
200
+ return {
201
+ optimalPoint: optParams[indexes[0]],
202
+ iterations: iteration,
203
+ };
204
+ }; // optimizeNM