hdim-opt 1.1.3__py3-none-any.whl → 1.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hdim_opt/__init__.py CHANGED
@@ -1,7 +1,7 @@
  # hdim_opt/__init__.py
 
  # package version
- __version__ = "1.1.3"
+ __version__ = "1.2.1"
  __all__ = ['quasar', 'hds', 'sobol', 'sensitivity', 'test_functions', 'quasar_helpers'] # available for star imports
 
  # import core components
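After upgrading, the bumped version string is visible at runtime through the attributes shown in this hunk; a minimal check might look like the following sketch.

```python
# minimal sketch: confirm the installed wheel reports the new release
import hdim_opt

print(hdim_opt.__version__)  # "1.2.1" for this release
print(hdim_opt.__all__)      # submodules exported for star imports
```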
@@ -553,7 +553,7 @@ def sample(n_samples, bounds,
  # dark visualization parameters for better sample visuals
 
  # samples
- fig, ax = plt.subplots(1,2,figsize=(11,5.5))
+ fig, ax = plt.subplots(1,2,figsize=(9,5))
 
  ax[0].scatter(hds_sequence_plot[:, 0], hds_sequence_plot[:, 1], s=0.67, zorder=5, color='deepskyblue',
  label='HDS Samples')
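The only change in this hunk is a more compact figure size for the two-panel sample plot. Below is a minimal standalone sketch of the same layout, using random 2-D points in place of the package's hds_sequence_plot array; the data here are illustrative only.

```python
import numpy as np
import matplotlib.pyplot as plt

# illustrative stand-in for the 2-D sample array plotted by the package
rng = np.random.default_rng(0)
samples = rng.random((1024, 2))

# same 1x2 layout with the new, smaller figure size
fig, ax = plt.subplots(1, 2, figsize=(9, 5))
ax[0].scatter(samples[:, 0], samples[:, 1], s=0.67, zorder=5,
              color='deepskyblue', label='HDS Samples')
ax[0].legend()
plt.show()
```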
@@ -260,14 +260,13 @@ def evolve_generation(obj_function, population, fitnesses, best_solution,
 
  return new_population, new_fitnesses
 
- def covariance_reinit(population, current_fitnesses, bounds, vectorized):
+ def asym_reinit(population, current_fitnesses, bounds, reinit_method, seed, vectorized):
  '''
  Objective:
  - Reinitializes the worst 33% solutions in the population.
- - Locations are determined based on a Gaussian distribution from the covariance matrix of 25% best solutions.
- - Noise is added to enhance diversity.
- - Probability decays to 33% at the 33% generation.
- - Conceptualizes particles tunneling to a more stable location.
+ - Locations are determined based on either:
+ - 'covariance' (default): Gaussian distribution from the covariance of 25% best solutions (exploitation).
+ - 'sobol': Uniformly Sobol distributed within the bounds (exploration).
  '''
 
  # reshape depending on vectorized input
@@ -276,60 +275,90 @@ def covariance_reinit(population, current_fitnesses, bounds, vectorized):
  else:
  popsize, dimensions = population.shape
 
- # handle case where not enough points for covariance matrix
+ # handle case where not enough points for reliable covariance matrix
  if popsize < dimensions + 1:
  return population
-
- # keep 25% of best solutions
- num_to_keep_factor = 0.25
- num_to_keep = int(popsize * num_to_keep_factor)
- if num_to_keep <= dimensions:
- num_to_keep = dimensions + 1 # minimum sample size scaled by dimensions
-
- # identify best solutions to calculate covariance gaussian model
- sorted_indices = np.argsort(current_fitnesses)
- best_indices = sorted_indices[:num_to_keep]
- if vectorized:
- best_solutions = population[:, best_indices]
- else:
- best_solutions = population[best_indices]
 
- # learn full-covariance matrix
- if vectorized:
- mean_vector = np.mean(best_solutions, axis=1)
- cov_matrix = np.cov(best_solutions)
- else:
- mean_vector = np.mean(best_solutions, axis=0)
- cov_matrix = np.cov(best_solutions, rowvar=False)
-
- # add epsilon to the diagonal to prevent singular matrix issues
- cov_matrix += np.eye(dimensions) * epsilon
-
  # identify solutions to be reset
  reset_population = 0.33
  num_to_replace = int(popsize * reset_population)
+ if num_to_replace == 0:
+ return population
+
+ sorted_indices = np.argsort(current_fitnesses)
  worst_indices = sorted_indices[-num_to_replace:]
+
+ # initializing new solutions
+ new_solutions = None
+
+ # covariance reinitialization; exploitation
+ if reinit_method == 'covariance':
+
+ # keep 25% of best solutions
+ num_to_keep_factor = 0.25
+ num_to_keep = int(popsize * num_to_keep_factor)
+ if num_to_keep <= dimensions:
+ num_to_keep = dimensions + 1 # minimum sample size scaled by dimensions
+
+ # identify best solutions to calculate covariance gaussian model
+ best_indices = sorted_indices[:num_to_keep]
+ if vectorized:
+ best_solutions = population[:, best_indices]
+ else:
+ best_solutions = population[best_indices]
+
+ # learn full-covariance matrix
+ if vectorized:
+ mean_vector = np.mean(best_solutions, axis=1)
+ cov_matrix = np.cov(best_solutions)
+ else:
+ mean_vector = np.mean(best_solutions, axis=0)
+ cov_matrix = np.cov(best_solutions, rowvar=False)
 
- # new solutions sampled from multivariate normal distribution
- if vectorized:
- new_solutions_sampled = np.random.multivariate_normal(mean=mean_vector, cov=cov_matrix, size=num_to_replace).T
- else:
+ # add epsilon to the diagonal to prevent singular matrix issues
+ cov_matrix += np.eye(dimensions) * epsilon
+
+ # new solutions sampled from multivariate normal distribution
  new_solutions_sampled = np.random.multivariate_normal(mean=mean_vector, cov=cov_matrix, size=num_to_replace)
-
- # add noise for exploration
- noise_scale = (bounds[:, 1] - bounds[:, 0]) / 20.0
- if vectorized:
- noise = np.random.normal(0, noise_scale[:, np.newaxis], size=new_solutions_sampled.shape)
- new_solutions = new_solutions_sampled + noise
- else:
- noise = np.random.normal(0, noise_scale, size=new_solutions_sampled.shape)
- new_solutions = new_solutions_sampled + noise
+
+ # add noise for exploration
+ noise_scale = (bounds[:, 1] - bounds[:, 0]) / 20.0
 
- # clip new solutions to bounds
- if vectorized:
- population[:, worst_indices] = np.clip(new_solutions, bounds[:, np.newaxis, 0], bounds[:, np.newaxis, 1])
- else:
- population[worst_indices] = np.clip(new_solutions, bounds[:, 0], bounds[:, 1])
+ # reshape
+ if vectorized:
+ new_solutions_sampled = new_solutions_sampled.T
+ noise = np.random.normal(0, noise_scale[:, np.newaxis], size=new_solutions_sampled.shape)
+ new_solutions = new_solutions_sampled + noise
+ else:
+ noise = np.random.normal(0, noise_scale, size=new_solutions_sampled.shape)
+ new_solutions = new_solutions_sampled + noise
+
+ # sobol reinitialization (high exploration)
+ elif reinit_method == 'sobol':
+
+ # generate sobol samples
+ sobol_sampler = stats.qmc.Sobol(d=dimensions, seed=seed)
+ sobol_samples_unit = sobol_sampler.random(n=num_to_replace)
+
+ bounds_low = bounds[:, 0]
+ bounds_high = bounds[:, 1]
+ scaled_samples = stats.qmc.scale(sobol_samples_unit, bounds_low, bounds_high)
+
+ # reshape
+ if vectorized:
+ new_solutions = scaled_samples.T
+ else:
+ new_solutions = scaled_samples
+
+
+ # update the selected worst indices population
+ if new_solutions is not None:
+ if vectorized:
+ population[:, worst_indices] = np.clip(new_solutions,
+ bounds[:, np.newaxis, 0],
+ bounds[:, np.newaxis, 1])
+ else:
+ population[worst_indices] = np.clip(new_solutions, bounds[:, 0], bounds[:, 1])
 
  return population
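Because the added and removed lines are interleaved above, the sketch below condenses the non-vectorized path of the new reinitialization logic: the worst 33% of solutions are replaced either with draws from a Gaussian fitted to the best 25% ('covariance', exploitation) or with Sobol points spread across the bounds ('sobol', exploration). The epsilon value and the stats alias for scipy.stats come from the surrounding module and are assumed here; this is an illustrative reconstruction, not the packaged source.

```python
import numpy as np
from scipy import stats

def asym_reinit_sketch(population, fitnesses, bounds, reinit_method='covariance',
                       seed=None, epsilon=1e-8):
    """Illustrative, non-vectorized reconstruction of the reinitialization step."""
    popsize, dimensions = population.shape
    if popsize < dimensions + 1:
        return population

    num_to_replace = int(popsize * 0.33)
    if num_to_replace == 0:
        return population

    order = np.argsort(fitnesses)
    worst = order[-num_to_replace:]

    if reinit_method == 'covariance':
        # Gaussian model fitted to the best 25% of solutions (exploitation)
        num_to_keep = max(int(popsize * 0.25), dimensions + 1)
        best = population[order[:num_to_keep]]
        mean_vector = best.mean(axis=0)
        cov_matrix = np.cov(best, rowvar=False) + np.eye(dimensions) * epsilon
        new_solutions = np.random.multivariate_normal(mean_vector, cov_matrix,
                                                      size=num_to_replace)
        # small bound-scaled noise for extra diversity
        noise_scale = (bounds[:, 1] - bounds[:, 0]) / 20.0
        new_solutions += np.random.normal(0, noise_scale, size=new_solutions.shape)
    else:
        # Sobol points spread uniformly across the bounds (exploration)
        sampler = stats.qmc.Sobol(d=dimensions, seed=seed)
        unit = sampler.random(n=num_to_replace)
        new_solutions = stats.qmc.scale(unit, bounds[:, 0], bounds[:, 1])

    population[worst] = np.clip(new_solutions, bounds[:, 0], bounds[:, 1])
    return population
```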
 
@@ -342,7 +371,7 @@ def optimize(func, bounds, args=(),
  patience=np.inf, vectorized=False,
  hds_weights=None, kwargs={},
  constraints=None, constraint_penalty=1e9,
- reinitialization=True,
+ reinitialization=True, reinitialization_method='covariance',
  verbose=True, plot_solutions=True, num_to_plot=10, plot_contour=True,
  workers=1, seed=None
  ):
@@ -351,7 +380,7 @@ def optimize(func, bounds, args=(),
  - Finds the optimal solution for a given objective function.
  - Designed for non-differentiable, high-dimensional problems.
  - Test functions available for local testing, called as hdim_opt.test_functions.function_name.
- - Existing test functions: [rastrigin, ackley, sinusoid, sphere]
+ - Existing test functions: [rastrigin, ackley, sinusoid, sphere, shubert].
 
  Inputs:
  - func: Objective function to minimize.
@@ -400,9 +429,13 @@ def optimize(func, bounds, args=(),
  }
  - constraint_penalty: Penalty applied to each constraint violated, defaults to 1e12.
 
- - covariance_reinit: Boolean to disable covariance reinitialization if needed.
+ - reinitialization: Boolean to disable covariance reinitialization if needed.
  - For cases where the population size is computationally prohibitive.
  - Disabled by default for 1D problems.
+ - reinitialization_method: Type of re-sampling to use in the asymptotic reinitialization.
+ - Options are ['covariance', 'sobol'].
+ - 'covariance' (exploitative) is default for most problems.
+ - 'sobol' (explorative) is optional, for high exploration and faster computation.
 
  - verbose: Displays prints and plots.
  - Mutation factor distribution shown with hdim_opt.test_functions.plot_mutations()
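For illustration, a call using the new keyword might look like the sketch below. The hdim_opt.quasar access path for optimize() and the exact signature of the bundled test function are assumptions inferred from this diff's docstring and __all__ list, not verified against the released package.

```python
import numpy as np
import hdim_opt

# 10-D box bounds; rastrigin is listed among the bundled test functions
bounds = np.array([[-5.12, 5.12]] * 10)

# hypothetical call: optimize() is assumed to be exposed via hdim_opt.quasar
result = hdim_opt.quasar.optimize(
    func=hdim_opt.test_functions.rastrigin,
    bounds=bounds,
    reinitialization=True,
    reinitialization_method='sobol',  # new in 1.2.x; 'covariance' is the default
    verbose=False,
    seed=42,
)
```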
@@ -612,7 +645,7 @@ def optimize(func, bounds, args=(),
  else:
  reinit_proba = 0.0
  if np.random.rand() < reinit_proba:
- population = covariance_reinit(population, current_fitnesses, bounds, vectorized=vectorized)
+ population = asym_reinit(population, current_fitnesses, bounds, reinitialization_method, seed, vectorized=vectorized)
 
  # clip population to bounds
  if vectorized:
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: hdim_opt
- Version: 1.1.3
+ Version: 1.2.1
  Summary: Optimization toolkit for high-dimensional, non-differentiable problems.
  Author-email: Julian Soltes <jsoltes@regis.edu>
  License: MIT
@@ -0,0 +1,11 @@
+ hdim_opt/__init__.py,sha256=GyHC0235o4gsDdmZp4_mf9HTH2wURh77q490EKMe4As,477
+ hdim_opt/hyperellipsoid_sampling.py,sha256=c34JkVciZbdAXjdfNjfC4h5NsrT2CD7Epsxpef5a1xY,24625
+ hdim_opt/quasar_helpers.py,sha256=zTgar2EuWs4MLSLEO7HRcP7At1xbXLP3q4Gg7-GrggQ,14799
+ hdim_opt/quasar_optimization.py,sha256=PYYg7MGQcjddLq86tivImw0kgmpeIRT7TfKRGC7QOBE,31979
+ hdim_opt/sobol_sampling.py,sha256=Xe_Zzs13xMxCben17gT85lFsoV-GKVOAAgi7lMxnlBI,912
+ hdim_opt/sobol_sensitivity.py,sha256=1ebeDSTmcLn03_MKDGiyJJ7r_ZSNCq2AKNcTX-hI23A,4384
+ hdim_opt/test_functions.py,sha256=RqjKYIiwAqWplGUsH4oPHLBrVdnLRyw7f0dJX5iyJ4g,2821
+ hdim_opt-1.2.1.dist-info/METADATA,sha256=PG6QUQ8PH-zJZxXjDXbFedHHp2oMHWKrtzfgDbaJBmY,3130
+ hdim_opt-1.2.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ hdim_opt-1.2.1.dist-info/top_level.txt,sha256=1KtWo9tEfEK3GC8D43cwVsC8yVG2Kc-9pl0hhcDjw4o,9
+ hdim_opt-1.2.1.dist-info/RECORD,,
@@ -1,11 +0,0 @@
- hdim_opt/__init__.py,sha256=rQp14fICPy_GoZrp-sDBopItLr3yR20C7-3U4tgQxEc,477
- hdim_opt/hyperellipsoid_sampling.py,sha256=QmbpFnZheCkwR5FOWNQS5C-FxDiNVKG97JPLqYH8G5I,24628
- hdim_opt/quasar_helpers.py,sha256=zTgar2EuWs4MLSLEO7HRcP7At1xbXLP3q4Gg7-GrggQ,14799
- hdim_opt/quasar_optimization.py,sha256=NOee1-e7mj3zUvbf-HI4a-JGrOwflG0f87HCVymANWA,30578
- hdim_opt/sobol_sampling.py,sha256=Xe_Zzs13xMxCben17gT85lFsoV-GKVOAAgi7lMxnlBI,912
- hdim_opt/sobol_sensitivity.py,sha256=1ebeDSTmcLn03_MKDGiyJJ7r_ZSNCq2AKNcTX-hI23A,4384
- hdim_opt/test_functions.py,sha256=RqjKYIiwAqWplGUsH4oPHLBrVdnLRyw7f0dJX5iyJ4g,2821
- hdim_opt-1.1.3.dist-info/METADATA,sha256=XP4iX4mdgSVX1dAyW3AwNTn5YVfmbdwMN4-tZdjMBfI,3130
- hdim_opt-1.1.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- hdim_opt-1.1.3.dist-info/top_level.txt,sha256=1KtWo9tEfEK3GC8D43cwVsC8yVG2Kc-9pl0hhcDjw4o,9
- hdim_opt-1.1.3.dist-info/RECORD,,