pyfemtet-0.3.12-py3-none-any.whl → pyfemtet-0.4.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pyfemtet might be problematic.
- pyfemtet/FemtetPJTSample/NX_ex01/NX_ex01.py +1 -1
- pyfemtet/FemtetPJTSample/Sldworks_ex01/Sldworks_ex01.py +1 -1
- pyfemtet/FemtetPJTSample/gau_ex08_parametric.py +1 -1
- pyfemtet/FemtetPJTSample/her_ex40_parametric.femprj +0 -0
- pyfemtet/FemtetPJTSample/her_ex40_parametric.py +1 -1
- pyfemtet/FemtetPJTSample/wat_ex14_parallel_parametric.py +1 -1
- pyfemtet/FemtetPJTSample/wat_ex14_parametric.femprj +0 -0
- pyfemtet/FemtetPJTSample/wat_ex14_parametric.py +1 -1
- pyfemtet/__init__.py +1 -1
- pyfemtet/core.py +14 -0
- pyfemtet/dispatch_extensions.py +5 -0
- pyfemtet/opt/__init__.py +22 -2
- pyfemtet/opt/_femopt.py +544 -0
- pyfemtet/opt/_femopt_core.py +730 -0
- pyfemtet/opt/interface/__init__.py +15 -0
- pyfemtet/opt/interface/_base.py +71 -0
- pyfemtet/opt/{interface.py → interface/_femtet.py} +120 -407
- pyfemtet/opt/interface/_femtet_with_nx/__init__.py +3 -0
- pyfemtet/opt/interface/_femtet_with_nx/_interface.py +128 -0
- pyfemtet/opt/interface/_femtet_with_sldworks.py +174 -0
- pyfemtet/opt/opt/__init__.py +8 -0
- pyfemtet/opt/opt/_base.py +202 -0
- pyfemtet/opt/opt/_optuna.py +240 -0
- pyfemtet/opt/visualization/__init__.py +7 -0
- pyfemtet/opt/visualization/_graphs.py +222 -0
- pyfemtet/opt/visualization/_monitor.py +1149 -0
- {pyfemtet-0.3.12.dist-info → pyfemtet-0.4.1.dist-info}/METADATA +4 -4
- pyfemtet-0.4.1.dist-info/RECORD +38 -0
- {pyfemtet-0.3.12.dist-info → pyfemtet-0.4.1.dist-info}/WHEEL +1 -1
- pyfemtet-0.4.1.dist-info/entry_points.txt +3 -0
- pyfemtet/opt/base.py +0 -1490
- pyfemtet/opt/monitor.py +0 -474
- pyfemtet-0.3.12.dist-info/RECORD +0 -26
- /pyfemtet/opt/{_FemtetWithNX → interface/_femtet_with_nx}/update_model.py +0 -0
- {pyfemtet-0.3.12.dist-info → pyfemtet-0.4.1.dist-info}/LICENSE +0 -0
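The file list above reflects the 0.4.x refactor: the monolithic pyfemtet/opt/base.py and pyfemtet/opt/monitor.py are deleted (base.py is shown in full below) and replaced by dedicated modules (_femopt.py, _femopt_core.py, opt/_base.py, opt/_optuna.py, interface/_femtet.py, visualization/_monitor.py). The short sketch below illustrates the apparent new entry points; the re-exported names are an assumption inferred from the enlarged __init__.py files, not something this diff itself confirms.

# Hypothetical 0.4.1 import layout (assumption; verify against the installed package).
# In 0.3.12 these classes lived together in pyfemtet.opt.base.
from pyfemtet.opt import FEMOpt, OptunaOptimizer      # orchestration (_femopt.py) and optimizer (opt/_optuna.py)
from pyfemtet.opt.interface import FemtetInterface    # moved from opt/interface.py to opt/interface/_femtet.py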
pyfemtet/opt/base.py
DELETED
@@ -1,1490 +0,0 @@
from abc import ABC, abstractmethod
from typing import List, Iterable

import os
import sys
import datetime
import inspect
import ast
from time import time, sleep
from threading import Thread
from subprocess import Popen
import warnings

import numpy as np
import pandas as pd
from scipy.stats.qmc import LatinHypercube
import optuna
from optuna.study import MaxTrialsCallback
from optuna.trial import TrialState
from optuna.exceptions import ExperimentalWarning
from optuna._hypervolume import WFG
from dask.distributed import LocalCluster, Client, Lock

from win32com.client import constants, Constants

from ..core import ModelError, MeshError, SolveError
from .interface import FEMInterface, FemtetInterface
from .monitor import Monitor

import logging
from ..logger import get_logger
logger = get_logger('opt')
logger.setLevel(logging.INFO)


warnings.filterwarnings('ignore', category=ExperimentalWarning)


def generate_lhs(bounds: List[List[float]], seed: int or None = None) -> np.ndarray:
    """Latin Hypercube Sampling from given design parameter bounds.

    If the number of parameters is d,
    sampler returns (N, d) shape ndarray.
    N equals p**2, p is the minimum prime number over d.
    For example, when d=3, then p=5 and N=25.

    Args:
        bounds (list[list[float]]): List of [lower_bound, upper_bound] of parameters.
        seed (int or None, optional): Random seed. Defaults to None.

    Returns:
        np.ndarray: (N, d) shape ndarray.
    """

    d = len(bounds)

    sampler = LatinHypercube(
        d,
        scramble=False,
        strength=2,
        # optimization='lloyd',
        optimization='random-cd',
        seed=seed,
    )

    LIMIT = 100

    def is_prime(p):
        for j in range(2, p):
            if p % j == 0:
                return False
        return True

    def get_prime(_minimum):
        for p in range(_minimum, LIMIT):
            if is_prime(p):
                return p

    n = get_prime(d + 1) ** 2
    data = sampler.random(n) # [0,1)

    for i, (data_range, datum) in enumerate(zip(bounds, data.T)):
        minimum, maximum = data_range
        band = maximum - minimum
        converted_datum = datum * band + minimum
        data[:, i] = converted_datum

    return data # data.shape = (N, d)


def symlog(x: float or np.ndarray):
    """Log function whose domain is extended to the negative region.

    Symlog processing is performed internally as a measure to reduce
    unintended trends caused by scale differences
    between objective functions in multi-objective optimization.

    Args:
        x (float or np.ndarray)

    Returns:
        float
    """

    if isinstance(x, np.ndarray):
        ret = np.zeros(x.shape)
        idx = np.where(x >= 0)
        ret[idx] = np.log10(x[idx] + 1)
        idx = np.where(x < 0)
        ret[idx] = -np.log10(1 - x[idx])
    else:
        if x >= 0:
            ret = np.log10(x + 1)
        else:
            ret = -np.log10(1 - x)

    return ret


def _check_direction(direction):
    message = '評価関数の direction は "minimize", "maximize", 又は数値でなければなりません.'
    message += f'与えられた値は {direction} です.'
    if isinstance(direction, float) or isinstance(direction, int):
        pass
    elif isinstance(direction, str):
        if (direction != 'minimize') and (direction != 'maximize'):
            raise ValueError(message)
    else:
        raise ValueError(message)


def _check_lb_ub(lb, ub, name=None):
    message = f'下限{lb} > 上限{ub} です.'
    if name is not None:
        message = f'{name}に対して' + message
    if (lb is not None) and (ub is not None):
        if lb > ub:
            raise ValueError(message)


def _is_access_gogh(fun):

    # 関数fのソースコードを取得
    source = inspect.getsource(fun)

    # ソースコードを抽象構文木(AST)に変換
    tree = ast.parse(source)

    # 関数定義を見つける
    for node in ast.walk(tree):
        if isinstance(node, ast.FunctionDef):
            # 関数の第一引数の名前を取得
            first_arg_name = node.args.args[0].arg

            # 関数内の全ての属性アクセスをチェック
            for sub_node in ast.walk(node):
                if isinstance(sub_node, ast.Attribute):
                    # 第一引数に対して 'Gogh' へのアクセスがあるかチェック
                    if (
                            isinstance(sub_node.value, ast.Name)
                            and sub_node.value.id == first_arg_name
                            and sub_node.attr == 'Gogh'
                    ):
                        return True
    # ここまできてもなければアクセスしてない
    return False


def _is_feasible(value, lb, ub):
    if lb is None and ub is not None:
        return value < ub
    elif lb is not None and ub is None:
        return lb < value
    elif lb is not None and ub is not None:
        return lb < value < ub
    else:
        return True


class _Scapegoat:
    """Helper class for parallelize Femtet."""
    # constants を含む関数を並列化するために
    # メイン処理で一時的に constants への参照を
    # このオブジェクトにして、後で restore する
    def __init__(self, ignore=False):
        self._ignore_when_restore_constants = ignore


class Function:
    """Base class for Objective and Constraint."""

    def __init__(self, fun, name, args, kwargs):

        # serializable でない COM 定数を parallelize するため
        # COM 定数を一度 _Scapegoat 型のオブジェクトにする
        for varname in fun.__globals__:
            if isinstance(fun.__globals__[varname], Constants):
                fun.__globals__[varname] = _Scapegoat()

        self.fun = fun
        self.name = name
        self.args = args
        self.kwargs = kwargs

    def calc(self, fem: FEMInterface):
        """Execute user-defined fun.

        Args:
            fem (FEMInterface)

        Returns:
            float
        """
        args = self.args
        # Femtet 特有の処理
        if isinstance(fem, FemtetInterface):
            args = (fem.Femtet, *args)
        return float(self.fun(*args, **self.kwargs))

    def _restore_constants(self):
        """Helper function for parallelize Femtet."""
        fun = self.fun
        for varname in fun.__globals__:
            if isinstance(fun.__globals__[varname], _Scapegoat):
                if not fun.__globals__[varname]._ignore_when_restore_constants:
                    fun.__globals__[varname] = constants


class Objective(Function):
    """Class for registering user-defined objective function."""

    default_name = 'obj'

    def __init__(self, fun, name, direction, args, kwargs):
        """Initializes an Objective instance.

        Args:
            fun: The user-defined objective function.
            name (str): The name of the objective function.
            direction (str or float or int): The direction of optimization.
            args: Additional arguments for the objective function.
            kwargs: Additional keyword arguments for the objective function.

        Raises:
            ValueError: If the direction is not valid.

        Note:
            If FEMOpt.fem is a instance of FemtetInterface or its subclass,
            the 1st argument of fun is set to fem automatically.


        """
        _check_direction(direction)
        self.direction = direction
        super().__init__(fun, name, args, kwargs)

    def convert(self, value: float):
        """Converts an evaluation value to the value of objective function based on the specified direction.

        When direction is `'minimize'`, ``value`` is calculated.
        When direction is `'maximize'`, ``-value`` is calculated.
        When direction is float, ``abs(value - direction)`` is calculated.
        Finally, the calculated value is passed to the symlog function and returns it.

        ``value`` is the return value of the user-defined function.

        Args:
            value (float): The evaluation value to be converted.

        Returns:
            float: The converted objective value.

        """

        # 評価関数(direction 任意)を目的関数(minimize, symlog)に変換する
        ret = value
        if isinstance(self.direction, float) or isinstance(self.direction, int):
            ret = abs(value - self.direction)
        elif self.direction == 'minimize':
            ret = value
        elif self.direction == 'maximize':
            ret = -value

        ret = symlog(ret)

        return float(ret)


class Constraint(Function):
    """Class for registering user-defined constraint function."""

    default_name = 'cns'

    def __init__(self, fun, name, lb, ub, strict, args, kwargs):
        """Initializes a Constraint instance.

        Args:
            fun: The user-defined constraint function.
            name (str): The name of the constraint function.
            lb: The lower bound of the constraint.
            ub: The upper bound of the constraint.
            strict (bool): Whether to enforce strict inequality for the bounds.
            args: Additional arguments for the constraint function.
            kwargs: Additional keyword arguments for the constraint function.

        Raises:
            ValueError: If the lower bound is greater than or equal to the upper bound.

        """

        _check_lb_ub(lb, ub)
        self.lb = lb
        self.ub = ub
        self.strict = strict
        super().__init__(fun, name, args, kwargs)


class _HistoryDfCore:
    """Class for managing a DataFrame object in a distributed manner."""

    def __init__(self):
        self.df = pd.DataFrame()

    def set_df(self, df):
        self.df = df

    def get_df(self):
        return self.df


class History:
    """Class for managing the history of optimization results.

    Attributes:
        path (str): The path to the history csv file.
        is_restart (bool): The main session is restarted or not.
        param_names (list): The names of the parameters in the study.
        obj_names (list): The names of the objectives in the study.
        cns_names (list): The names of the constraints in the study.
        actor_data (pd.DataFrame): The history data of optimization.

    """
    def __init__(self, history_path, client):
        """Initializes a History instance.

        Args:
            history_path (str): The path to the history file.

        """

        # 引数の処理
        self.path = history_path # .csv
        self.is_restart = False
        self._future = client.submit(_HistoryDfCore, actor=True)
        self._actor_data = self._future.result()
        self.tmp_data = pd.DataFrame()
        self.param_names = []
        self.obj_names = []
        self.cns_names = []

        # path が存在すれば dataframe を読み込む
        if os.path.isfile(self.path):
            self.tmp_data = pd.read_csv(self.path, encoding='shift-jis')
            self.actor_data = self.tmp_data
            self.is_restart = True

    @property
    def actor_data(self):
        return self._actor_data.get_df().result()

    @actor_data.setter
    def actor_data(self, df):
        self._actor_data.set_df(df).result()

    def init(self, param_names, obj_names, cns_names):
        """Initializes the parameter, objective, and constraint names in the History instance.

        Args:
            param_names (list): The names of parameters in optimization.
            obj_names (list): The names of objectives in optimization.
            cns_names (list): The names of constraints in optimization.

        """
        self.param_names = param_names
        self.obj_names = obj_names
        self.cns_names = cns_names

        columns = list()
        columns.append('trial') # index
        columns.extend(self.param_names) # parameters
        for obj_name in self.obj_names: # objectives, direction
            columns.extend([obj_name, f'{obj_name}_direction'])
        columns.append('non_domi')
        for cns_name in cns_names: # cns, lb, ub
            columns.extend([cns_name, f'{cns_name}_lb', f'{cns_name}_ub'])
        columns.append('feasible')
        columns.append('hypervolume')
        columns.append('message')
        columns.append('time')

        # restart ならば前のデータとの整合を確認
        if len(self.actor_data.columns) > 0:
            # 読み込んだ columns が生成した columns と違っていればエラー
            try:
                if list(self.actor_data.columns) != columns:
                    raise Exception(f'読み込んだ history と問題の設定が異なります. \n\n読み込まれた設定:\n{list(self.actor_data.columns)}\n\n現在の設定:\n{columns}')
                else:
                    # 同じであっても目的と拘束の上下限や direction が違えばエラー
                    pass
            except ValueError:
                raise Exception(f'読み込んだ history と問題の設定が異なります. \n\n読み込まれた設定:\n{list(self.actor_data.columns)}\n\n現在の設定:\n{columns}')

        else:
            for column in columns:
                self.tmp_data[column] = None
            # actor_data は actor 経由の getter property なので self.data[column] = ... とやっても
            # actor には変更が反映されない. 以下同様
            tmp = self.actor_data
            for column in columns:
                tmp[column] = None
            self.actor_data = tmp

    def record(self, parameters, objectives, constraints, obj_values, cns_values, message):
        """Records the optimization results in the history.

        Args:
            parameters (pd.DataFrame): The parameter values.
            objectives (dict): The objective functions.
            constraints (dict): The constraint functions.
            obj_values (list): The objective values.
            cns_values (list): The constraint values.
            message (str): Additional information or messages related to the optimization results.

        """

        # create row
        row = list()
        row.append(-1) # dummy trial index
        row.extend(parameters['value'].values)
        for (name, obj), obj_value in zip(objectives.items(), obj_values): # objectives, direction
            row.extend([obj_value, obj.direction])
        row.append(False) # dummy non_domi
        feasible_list = []
        for (name, cns), cns_value in zip(constraints.items(), cns_values): # cns, lb, ub
            row.extend([cns_value, cns.lb, cns.ub])
            feasible_list.append(_is_feasible(cns_value, cns.lb, cns.ub))
        row.append(all(feasible_list))
        row.append(-1.) # dummy hypervolume
        row.append(message) # message
        row.append(datetime.datetime.now()) # time

        with Lock('calc-history'):
            # append
            if len(self.actor_data) == 0:
                self.tmp_data = pd.DataFrame([row], columns=self.actor_data.columns)
            else:
                self.tmp_data = self.actor_data
                self.tmp_data.loc[len(self.tmp_data)] = row

            # calc
            self.tmp_data['trial'] = np.arange(len(self.tmp_data)) + 1 # 1 始まり
            self._calc_non_domi(objectives) # update self.tmp_data
            self._calc_hypervolume(objectives) # update self.tmp_data
            self.actor_data = self.tmp_data

    def _calc_non_domi(self, objectives):

        # 目的関数の履歴を取り出してくる
        solution_set = self.tmp_data[self.obj_names]

        # 最小化問題の座標空間に変換する
        for name, objective in objectives.items():
            solution_set.loc[:, name] = solution_set[name].map(objective.convert)

        # 非劣解の計算
        non_domi = []
        for i, row in solution_set.iterrows():
            non_domi.append((row > solution_set).product(axis=1).sum(axis=0) == 0)

        # 非劣解の登録
        self.tmp_data['non_domi'] = non_domi

    def _calc_hypervolume(self, objectives):
        # タイピングが面倒
        df = self.tmp_data

        # パレート集合の抽出
        idx = df['non_domi'].values
        pdf = df[idx]
        pareto_set = pdf[self.obj_names].values
        n = len(pareto_set) # 集合の要素数
        m = len(pareto_set.T) # 目的変数数
        # 多目的でないと計算できない
        if m <= 1:
            return None
        # 長さが 2 以上でないと計算できない
        if n <= 1:
            return None
        # 最小化問題に convert
        for i, (name, objective) in enumerate(objectives.items()):
            for j in range(n):
                pareto_set[j, i] = objective.convert(pareto_set[j, i])
        #### reference point の計算[1]
        # 逆正規化のための範囲計算
        maximum = pareto_set.max(axis=0)
        minimum = pareto_set.min(axis=0)

        # # [1]Hisao Ishibuchi et al. "Reference Point Specification in Hypercolume Calculation for Fair Comparison and Efficient Search"
        # # (H+m-1)C(m-1) <= n <= (m-1)C(H+m) になるような H を探す[1]
        # H = 0
        # while True:
        #     left = math.comb(H + m - 1, m - 1)
        #     right = math.comb(H + m, m - 1)
        #     if left <= n <= right:
        #         break
        #     else:
        #         H += 1
        # # H==0 なら r は最大の値
        # if H == 0:
        #     r = 2
        # else:
        #     # r を計算
        #     r = 1 + 1. / H
        r = 1.01

        # r を逆正規化
        reference_point = r * (maximum - minimum) + minimum

        #### hv 履歴の計算
        wfg = WFG()
        hvs = []
        for i in range(n):
            hv = wfg.compute(pareto_set[:i], reference_point)
            if np.isnan(hv):
                hv = 0
            hvs.append(hv)

        # 計算結果を履歴の一部に割り当て
        df.loc[idx, 'hypervolume'] = np.array(hvs)

        # dominated の行に対して、上に見ていって
        # 最初に見つけた non-domi 行の hypervolume の値を割り当てます
        for i in range(len(df)):
            if not df.loc[i, 'non_domi']:
                try:
                    df.loc[i, 'hypervolume'] = df.loc[:i][df.loc[:i]['non_domi']].iloc[-1]['hypervolume']
                except IndexError:
                    df.loc[i, 'hypervolume'] = 0


class _OptimizationStatusActor:
    status_int = -1
    status = 'undefined'

    def set(self, value, text):
        self.status_int = value
        self.status = text


class OptimizationStatus:
    """Optimization status."""
    UNDEFINED = -1
    INITIALIZING = 0
    SETTING_UP = 10
    LAUNCHING_FEM = 20
    WAIT_OTHER_WORKERS = 22
    # WAIT_1ST = 25
    RUNNING = 30
    INTERRUPTING = 40
    TERMINATED = 50
    TERMINATE_ALL = 60

    def __init__(self, client, name='entire'):
        self._future = client.submit(_OptimizationStatusActor, actor=True)
        self._actor = self._future.result()
        self.name = name
        self.set(self.INITIALIZING)

    @classmethod
    def const_to_str(cls, status_const):
        if status_const == cls.UNDEFINED: return 'Undefined'
        if status_const == cls.INITIALIZING: return 'Initializing'
        if status_const == cls.SETTING_UP: return 'Setting up'
        if status_const == cls.LAUNCHING_FEM: return 'Launching FEM processes'
        if status_const == cls.WAIT_OTHER_WORKERS: return 'Waiting for launching other processes'
        # if status_const == cls.WAIT_1ST: return 'Running and waiting for 1st FEM result.'
        if status_const == cls.RUNNING: return 'Running'
        if status_const == cls.INTERRUPTING: return 'Interrupting'
        if status_const == cls.TERMINATED: return 'Terminated'
        if status_const == cls.TERMINATE_ALL: return 'Terminate_all'

    def set(self, status_const):
        self._actor.set(status_const, self.const_to_str(status_const)).result()
        msg = f'---{self.const_to_str(status_const)}---'
        if (status_const == self.INITIALIZING) and (self.name != 'entire'):
            msg += f' (for Worker {self.name})'
        if self.name == 'entire':
            msg = '(entire) ' + msg
        logger.info(msg)

    def get(self):
        return self._actor.status_int

    def get_text(self):
        return self._actor.status


class AbstractOptimizer(ABC):
    """Abstract base class for an interface of optimization library.

    Attributes:
        fem (FEMInterface): The finite element method object.
        fem_class (type): The class of the finite element method object.
        fem_kwargs (dict): The keyword arguments used to instantiate the finite element method object.
        parameters (pd.DataFrame): The parameters used in the optimization.
        objectives (dict): A dictionary containing the objective functions used in the optimization.
        constraints (dict): A dictionary containing the constraint functions used in the optimization.
        entire_status (OptimizationStatus): The status of the entire optimization process.
        history (History): An actor object that records the history of each iteration in the optimization process.
        worker_status (OptimizationStatus): The status of each worker in a distributed computing environment.
        message (str): A message associated with the current state of the optimization process.
        seed (int or None): The random seed used for random number generation during the optimization process.
        timeout (float or int or None): The maximum time allowed for each iteration of the optimization process. If exceeded, it will be interrupted and terminated early.
        n_trials (int or None): The maximum number of trials allowed for each iteration of the optimization process. If exceeded, it will be interrupted and terminated early.
        is_cluster (bool): Flag indicating if running on a distributed computing cluster.

    """

    def __init__(self):
        self.fem = None
        self.fem_class = None
        self.fem_kwargs = dict()
        self.parameters = pd.DataFrame()
        self.objectives = dict()
        self.constraints = dict()
        self.entire_status = None # actor
        self.history = None # actor
        self.worker_status = None # actor
        self.message = ''
        self.seed = None
        self.timeout = None
        self.n_trials = None
        self.is_cluster = False

    def f(self, x):
        """Get x, update fem analysis, return objectives (and constraints)."""
        # interruption の実装は具象クラスに任せる

        # x の更新
        self.parameters['value'] = x

        # FEM の更新
        logger.debug('fem.update() start')
        self.fem.update(self.parameters)

        # y, _y, c の更新
        logger.debug('calculate y start')
        y = [obj.calc(self.fem) for obj in self.objectives.values()]

        logger.debug('calculate _y start')
        _y = [obj.convert(value) for obj, value in zip(self.objectives.values(), y)]

        logger.debug('calculate c start')
        c = [cns.calc(self.fem) for cns in self.constraints.values()]

        logger.debug('history.record start')
        self.history.record(
            self.parameters,
            self.objectives,
            self.constraints,
            y,
            c,
            self.message
        )

        logger.debug('history.record end')
        return np.array(y), np.array(_y), np.array(c)

    def set_fem(self, skip_reconstruct=False):
        """Reconstruct FEMInterface in a subprocess."""
        # restore fem
        if not skip_reconstruct:
            self.fem = self.fem_class(**self.fem_kwargs)

        # COM 定数の restore
        for obj in self.objectives.values():
            obj._restore_constants()
        for cns in self.constraints.values():
            cns._restore_constants()

    def get_parameter(self, format='dict'):
        """Returns the parameters in the specified format.

        Args:
            format (str, optional): The desired format of the parameters. Can be 'df' (DataFrame), 'values', or 'dict'. Defaults to 'dict'.

        Returns:
            object: The parameters in the specified format.

        Raises:
            ValueError: If an invalid format is provided.

        """
        if format == 'df':
            return self.parameters
        elif format == 'values' or format == 'value':
            return self.parameters.value.values
        elif format == 'dict':
            ret = {}
            for i, row in self.parameters.iterrows():
                ret[row['name']] = row.value
            return ret
        else:
            raise ValueError('get_parameter() got invalid format: {format}')

    def _check_interruption(self):
        """"""
        if self.entire_status.get() == OptimizationStatus.INTERRUPTING:
            self.worker_status.set(OptimizationStatus.INTERRUPTING)
            self.finalize()
            return True
        else:
            return False

    def finalize(self):
        """Destruct fem and set worker status."""
        del self.fem
        self.worker_status.set(OptimizationStatus.TERMINATED)

    def _main(
            self,
            subprocess_idx,
            worker_status_list,
            wait_setup,
            skip_set_fem=False,
    ) -> None:

        # 自分の worker_status の取得
        self.worker_status = worker_status_list[subprocess_idx]
        self.worker_status.set(OptimizationStatus.LAUNCHING_FEM)

        if self._check_interruption():
            return None

        # set_fem をはじめ、終了したらそれを示す
        if not skip_set_fem: # なくても動く??
            self.set_fem()
        self.fem.setup_after_parallel()
        self.worker_status.set(OptimizationStatus.WAIT_OTHER_WORKERS)

        # wait_setup or not
        if wait_setup:
            while True:
                if self._check_interruption():
                    return None
                # 他のすべての worker_status が wait 以上になったら break
                if all([ws.get() >= OptimizationStatus.WAIT_OTHER_WORKERS for ws in worker_status_list]):
                    break
                sleep(1)
        else:
            if self._check_interruption():
                return None

        # set status running
        if self.entire_status.get() < OptimizationStatus.RUNNING:
            self.entire_status.set(OptimizationStatus.RUNNING)
        self.worker_status.set(OptimizationStatus.RUNNING)

        # run and finalize
        try:
            self.main(subprocess_idx)
        finally:
            self.finalize()

        return None

    @abstractmethod
    def main(self, subprocess_idx: int = 0) -> None:
        """Start calcuration using optimization library."""
        pass

    @abstractmethod
    def setup_before_parallel(self, *args, **kwargs):
        """Setup before parallel processes are launched."""
        pass


class OptunaOptimizer(AbstractOptimizer):

    def __init__(
            self,
            sampler_class: optuna.samplers.BaseSampler or None = None,
            sampler_kwargs: dict or None = None,
            add_init_method: str or Iterable[str] or None = None
    ):
        super().__init__()
        self.study_name = None
        self.storage = None
        self.study = None
        self.optimize_callbacks = []
        self.sampler_class = optuna.samplers.TPESampler if sampler_class is None else sampler_class
        self.sampler_kwargs = dict() if sampler_kwargs is None else sampler_kwargs
        self.additional_initial_parameter = []
        self.additional_initial_methods = add_init_method if hasattr(add_init_method, '__iter__') else [add_init_method]

    def _objective(self, trial):

        # 中断の確認 (FAIL loop に陥る対策)
        if self.entire_status.get() == OptimizationStatus.INTERRUPTING:
            self.worker_status.set(OptimizationStatus.INTERRUPTING)
            trial.study.stop() # 現在実行中の trial を最後にする
            return None # set TrialState FAIL

        # candidate x
        x = []
        for i, row in self.parameters.iterrows():
            v = trial.suggest_float(row['name'], row['lb'], row['ub'])
            x.append(v)
        x = np.array(x).astype(float)

        # message の設定
        self.message = trial.user_attrs['message'] if 'message' in trial.user_attrs.keys() else ''

        # fem や opt 経由で変数を取得して constraint を計算する時のためにアップデート
        self.parameters['value'] = x
        self.fem.update_parameter(self.parameters)

        # strict 拘束
        strict_constraints = [cns for cns in self.constraints.values() if cns.strict]
        for cns in strict_constraints:
            feasible = True
            cns_value = cns.calc(self.fem)
            if cns.lb is not None:
                feasible = feasible and (cns_value >= cns.lb)
            if cns.ub is not None:
                feasible = feasible and (cns.ub >= cns_value)
            if not feasible:
                logger.info(f'以下の変数で拘束 {cns.name} が満たされませんでした。')
                print(self.get_parameter('dict'))
                raise optuna.TrialPruned() # set TrialState PRUNED because FAIL causes similar candidate loop.

        # 計算
        try:
            _, _y, c = self.f(x)
        except (ModelError, MeshError, SolveError) as e:
            logger.info(e)
            logger.info('以下の変数で FEM 解析に失敗しました。')
            print(self.get_parameter('dict'))

            # 中断の確認 (解析中に interrupt されている場合対策)
            if self.entire_status.get() == OptimizationStatus.INTERRUPTING:
                self.worker_status.set(OptimizationStatus.INTERRUPTING)
                trial.study.stop() # 現在実行中の trial を最後にする
                return None # set TrialState FAIL

            raise optuna.TrialPruned() # set TrialState PRUNED because FAIL causes similar candidate loop.

        # 拘束 attr の更新
        _c = [] # 非正なら OK
        for (name, cns), c_value in zip(self.constraints.items(), c):
            lb, ub = cns.lb, cns.ub
            if lb is not None: # fun >= lb <=> lb - fun <= 0
                _c.append(lb - c_value)
            if ub is not None: # ub >= fun <=> fun - ub <= 0
                _c.append(c_value - ub)
        trial.set_user_attr('constraint', _c)

        # 中断の確認 (解析中に interrupt されている場合対策)
        if self.entire_status.get() == OptimizationStatus.INTERRUPTING:
            self.worker_status.set(OptimizationStatus.INTERRUPTING)
            trial.study.stop() # 現在実行中の trial を最後にする
            return None # set TrialState FAIL

        # 結果
        return tuple(_y)

    def _constraint(self, trial):
        return trial.user_attrs['constraint'] if 'constraint' in trial.user_attrs.keys() else (1,) # infeasible

    def setup_before_parallel(self):
        """Create storage, study and set initial parameter."""

        # create storage
        self.study_name = os.path.basename(self.history.path)
        storage_path = self.history.path.replace('.csv', '.db') # history と同じところに保存
        if self.is_cluster: # remote cluster なら scheduler の working dir に保存
            storage_path = os.path.basename(self.history.path).replace('.csv', '.db')

        # callback to terminate
        if self.n_trials is not None:
            n_trials = self.n_trials

            # restart である場合、追加 N 回と見做す
            if self.history.is_restart:
                n_existing_trials = len(self.history.actor_data)
                n_trials += n_existing_trials

            self.optimize_callbacks.append(MaxTrialsCallback(n_trials, states=(TrialState.COMPLETE,)))

        # if not restart, create study if storage is not exists
        if not self.history.is_restart:

            self.storage = optuna.integration.dask.DaskStorage(
                f'sqlite:///{storage_path}',
            )

            self.study = optuna.create_study(
                study_name=self.study_name,
                storage=self.storage,
                load_if_exists=True,
                directions=['minimize'] * len(self.objectives),
            )

            # 初期値の設定
            if len(self.study.trials) == 0: # リスタートでなければ
                # ユーザーの指定した初期値
                params = self.get_parameter('dict')
                self.study.enqueue_trial(params, user_attrs={"message": "initial"})

                # add_initial_parameter で追加された初期値
                for prm, prm_set_name in self.additional_initial_parameter:
                    self.study.enqueue_trial(
                        prm,
                        user_attrs={"message": prm_set_name}
                    )

                # add_init で指定された方法による初期値
                if 'LHS' in self.additional_initial_methods:
                    names = []
                    bounds = []
                    for i, row in self.parameters.iterrows():
                        names.append(row['name'])
                        lb = row['lb']
                        ub = row['ub']
                        bounds.append([lb, ub])
                    data = generate_lhs(bounds, seed=self.seed)
                    for datum in data:
                        d = {}
                        for name, v in zip(names, datum):
                            d[name] = v
                        self.study.enqueue_trial(
                            d, user_attrs={"message": "additional initial (Latin Hypercube Sampling)"}
                        )

        # if is_restart, load study
        else:
            if not os.path.exists(storage_path):
                msg = f'{storage_path} が見つかりません。'
                msg += '.db ファイルは .csv ファイルと同じフォルダに生成されます。'
                msg += 'クラスター解析の場合は、スケジューラを起動したフォルダに生成されます。'
                raise FileNotFoundError(msg)
            self.storage = optuna.integration.dask.DaskStorage(
                f'sqlite:///{storage_path}',
            )

    def add_init_parameter(
            self,
            parameter: dict or Iterable,
            name: str or None = None,
    ):
        """Add additional initial parameter for evaluate.

        The parameter set is ignored if the main() is continued.

        Args:
            parameter (dict or Iterable): Parameter to evaluate before run optimization algorithm.
            name (str or None): Optional. If specified, the name is saved in the history row. Default to None.

        """
        if name is None:
            name = 'additional initial'
        else:
            name = f'additional initial ({name})'
        self.additional_initial_parameter.append([parameter, name])

    def main(self, subprocess_idx=0):
        """Set random seed, sampler, study and run study.optimize()."""

        # (re)set random seed
        seed = self.seed
        if seed is not None:
            if subprocess_idx is not None:
                seed += subprocess_idx

        # restore sampler
        sampler = self.sampler_class(
            seed=seed,
            constraints_func=self._constraint,
            **self.sampler_kwargs
        )

        # load study
        study = optuna.load_study(
            study_name=self.study_name,
            storage=self.storage,
            sampler=sampler,
        )

        # run
        study.optimize(
            self._objective,
            timeout=self.timeout,
            callbacks=self.optimize_callbacks,
        )


class FEMOpt:
    """Base class to control FEM interface and optimizer.

    Attributes:
        fem (FEMInterface): The interface of FEM system.
        client (Client): Dask client. For detail, see dask documentation.
        scheduler_address (str or None): Dask scheduler address. If None, LocalCluster will be used.
        status (OptimizationStatus): Entire process status. This contains dask actor.
        history(History): History of optimization process. This contains dask actor.
        history_path (str): The path to the history (.csv) file.
        worker_status_list([OptimizationStatus]): Process status of each dask worker.
        monitor_process_future(Future): Future of monitor server process. This is dask future.
        monitor_server_kwargs(dict): Monitor server parameter. Currently, the valid arguments are hostname and port.

    """

    def __init__(
            self,
            fem: FEMInterface = None,
            opt: AbstractOptimizer = None,
            history_path: str = None,
            scheduler_address: str = None
    ):
        """Initializes an FEMOpt instance.

        Args:
            fem (FEMInterface, optional): The finite element method interface. Defaults to None. If None, automatically set to FemtetInterface.
            opt (AbstractOptimizer):
            history_path (str, optional): The path to the history file. Defaults to None. If None, '%Y_%m_%d_%H_%M_%S.csv' is created in current directory.
            scheduler_address (str or None): If cluster processing, set this parameter like ``"tcp://xxx.xxx.xxx.xxx:xxxx"``.

        """

        logger.info('Initialize FEMOpt')

        # 引数の処理
        if history_path is None:
            history_path = datetime.datetime.now().strftime('%Y%m%d_%H%M%S.csv')
        self.history_path = os.path.abspath(history_path)
        self.scheduler_address = scheduler_address

        if fem is None:
            self.fem = FemtetInterface()
        else:
            self.fem = fem

        if opt is None:
            self.opt = OptunaOptimizer()
        else:
            self.opt = opt

        # メンバーの宣言
        self.client = None
        self.status = None # actor
        self.history = None # actor
        self.worker_status_list = None # [actor]
        self.monitor_process_future = None
        self.monitor_server_kwargs = dict()
        self.monitor_process_worker_name = None

    # multiprocess 時に pickle できないオブジェクト参照の削除
    def __getstate__(self):
        state = self.__dict__.copy()
        del state['fem']
        return state

    def __setstate__(self, state):
        self.__dict__.update(state)

    def set_random_seed(self, seed: int):
        """Sets the random seed for reproducibility.

        Args:
            seed (int): The random seed value to be set.

        """
        self.opt.seed = seed

    def add_parameter(
            self,
            name: str,
            initial_value: float or None = None,
            lower_bound: float or None = None,
            upper_bound: float or None = None,
            memo: str = ''
    ):
        """Adds a parameter to the optimization problem.

        Args:
            name (str): The name of the parameter.
            initial_value (float or None, optional): The initial value of the parameter. Defaults to None. If None, try to get initial value from FEMInterface.
            lower_bound (float or None, optional): The lower bound of the parameter. Defaults to None. However, this argument is required for some algorithms.
            upper_bound (float or None, optional): The upper bound of the parameter. Defaults to None. However, this argument is required for some algorithms.
            memo (str, optional): Additional information about the parameter. Defaults to ''.
        Raises:
            ValueError: If initial_value is not specified and the value for the given name is also not specified.

        """

        _check_lb_ub(lower_bound, upper_bound, name)
        value = self.fem.check_param_value(name)
        if initial_value is None:
            if value is not None:
                initial_value = value
            else:
                raise ValueError('initial_value を指定してください.')

        d = {
            'name': name,
            'value': float(initial_value),
            'lb': float(lower_bound),
            'ub': float(upper_bound),
            'memo': memo,
        }
        pdf = pd.DataFrame(d, index=[0], dtype=object)

        if len(self.opt.parameters) == 0:
            self.opt.parameters = pdf
        else:
            self.opt.parameters = pd.concat([self.opt.parameters, pdf], ignore_index=True)

    def add_objective(
            self,
            fun,
            name: str or None = None,
            direction: str or float = 'minimize',
            args: tuple or None = None,
            kwargs: dict or None = None
    ):
        """Adds an objective to the optimization problem.

        Args:
            fun (callable): The objective function.
            name (str or None, optional): The name of the objective. Defaults to None.
            direction (str or float, optional): The optimization direction. Defaults to 'minimize'.
            args (tuple or None, optional): Additional arguments for the objective function. Defaults to None.
            kwargs (dict or None, optional): Additional keyword arguments for the objective function. Defaults to None.

        Note:
            If the FEMInterface is FemtetInterface, the 1st argument of fun should be Femtet (IPyDispatch) object.

        Tip:
            If name is None, name is a string with the prefix `"obj_"` followed by a sequential number.

        """

        # 引数の処理
        if args is None:
            args = tuple()
        elif not isinstance(args, tuple):
            args = (args,)
        if kwargs is None:
            kwargs = dict()
        if name is None:
            prefix = Objective.default_name
            i = 0
            while True:
                candidate = f'{prefix}_{str(int(i))}'
                is_existing = candidate in list(self.opt.objectives.keys())
                if not is_existing:
                    break
                else:
                    i += 1
            name = candidate

        self.opt.objectives[name] = Objective(fun, name, direction, args, kwargs)


    def add_constraint(
            self,
            fun,
            name: str or None = None,
            lower_bound: float or None = None,
            upper_bound: float or None = None,
            strict: bool = True,
            args: tuple or None = None,
            kwargs: dict or None = None,
    ):
        """Adds a constraint to the optimization problem.

        Args:
            fun (callable): The constraint function.
            name (str or None, optional): The name of the constraint. Defaults to None.
            lower_bound (float or Non, optional): The lower bound of the constraint. Defaults to None.
            upper_bound (float or Non, optional): The upper bound of the constraint. Defaults to None.
            strict (bool, optional): Flag indicating if it is a strict constraint. Defaults to True.
            args (tuple or None, optional): Additional arguments for the constraint function. Defaults to None.
            kwargs (dict): Additional arguments for the constraint function. Defaults to None.

        Note:
            If the FEMInterface is FemtetInterface, the 1st argument of fun should be Femtet (IPyDispatch) object.

        Tip:
            If name is None, name is a string with the prefix `"cns_"` followed by a sequential number.

        """

        # 引数の処理
        if args is None:
            args = tuple()
        elif not isinstance(args, tuple):
            args = (args,)
        if kwargs is None:
            kwargs = dict()
        if name is None:
            prefix = Constraint.default_name
            i = 0
            while True:
                candidate = f'{prefix}_{str(int(i))}'
                is_existing = candidate in list(self.opt.constraints.keys())
                if not is_existing:
                    break
                else:
                    i += 1
            name = candidate

        # strict constraint の場合、solve 前に評価したいので Gogh へのアクセスを禁ずる
        if strict:
            if _is_access_gogh(fun):
                message = f'関数 {fun.__name__} に Gogh (Femtet 解析結果)へのアクセスがあります.'
                message += 'デフォルトでは constraint は解析前に評価され, 条件を満たさない場合解析を行いません.'
                message += '拘束に解析結果を含めたい場合は, strict=False を設定してください.'
                raise Exception(message)

        self.opt.constraints[name] = Constraint(fun, name, lower_bound, upper_bound, strict, args, kwargs)

    def get_parameter(self, format='dict'):
        """Returns the parameters in the specified format.

        Args:
            format (str, optional): The desired format of the parameters. Can be 'df' (DataFrame), 'values', or 'dict'. Defaults to 'dict'.

        Returns:
            object: The parameters in the specified format.

        Raises:
            ValueError: If an invalid format is provided.

        """
        return self.opt.get_parameter(format)

    def set_monitor_host(self, host='localhost', port=None):
        """Sets up the monitor server with the specified host and port.

        Args:
            host (str): The hostname or IP address of the monitor server.
            port (int or None, optional): The port number of the monitor server. If None, ``8080`` will be used. Defaults to None.

        Tip:
            If you do not know the host IP address
            for connecting to the local network,
            use the ``ipconfig`` command to find out.

            Alternatively, you can specify host as 0.0.0.0
            to access the monitor server through all network interfaces
            used by that computer.

            However, please note that in this case,
            it will be visible to all users on the local network.

        """
        self.monitor_server_kwargs = dict(
            host=host,
            port=port
        )

    def main(
            self,
            n_trials=None,
            n_parallel=1,
            timeout=None,
            wait_setup=True,
    ):
        """Runs the main optimization process.

        Args:
            n_trials (int or None, optional): The number of trials. Defaults to None.
            n_parallel (int, optional): The number of parallel processes. Defaults to 1.
            timeout (float or None, optional): The maximum amount of time in seconds that each trial can run. Defaults to None.
            wait_setup (bool, optional): Wait for all workers launching FEM system. Defaults to True.

        Tip:
            If setup_monitor_server() is not executed, a local server for monitoring will be started at localhost:8080.

        Note:
            If ``n_trials`` and ``timeout`` are both None, it runs forever until interrupting by the user.

        Note:
            If ``n_parallel`` >= 2, depending on the end timing, ``n_trials`` may be exceeded by up to ``n_parallel-1`` times.

        Warning:
            If ``n_parallel`` >= 2 and ``fem`` is a subclass of ``FemtetInterface``, the ``strictly_pid_specify`` of subprocess is set to ``False``.
            So **it is recommended to close all other Femtet processes before running main().**

        """

        # 共通引数
        self.opt.n_trials = n_trials
        self.opt.timeout = timeout

        # クラスターの設定
        self.opt.is_cluster = self.scheduler_address is not None
        if self.opt.is_cluster:
            # 既存のクラスターに接続
            logger.info('Connecting to existing cluster.')
            self.client = Client(self.scheduler_address)
        else:
            # ローカルクラスターを構築
            logger.info('Launching single machine cluster. This may take tens of seconds.')
            cluster = LocalCluster(processes=True)
            self.client = Client(cluster, direct_to_workers=False)
            self.scheduler_address = self.client.scheduler.address

        # タスクを振り分ける worker を指定
        subprocess_indices = list(range(n_parallel))
        if not self.opt.is_cluster:
            subprocess_indices = subprocess_indices[1:]
        worker_addresses = list(self.client.nthreads().keys())
        if len(subprocess_indices)>0:
            assert max(subprocess_indices) <= len(worker_addresses)-1, f'コア数{len(worker_addresses)}は不足しています。'
        worker_addresses = worker_addresses[:len(range(n_parallel))] # TODO: ノードごとに適度に振り分ける
        if not self.opt.is_cluster:
            worker_addresses[0] = 'Main'

        # monitor 用 worker を起動
        logger.info('Launching monitor server. This may take a few seconds.')
        self.monitor_process_worker_name = datetime.datetime.now().strftime("Monitor-%Y%m%d-%H%M%S")
        cmd = f'{sys.executable} -m dask worker {self.client.scheduler.address} --name {self.monitor_process_worker_name} --no-nanny'
        current_n_workers = len(self.client.nthreads().keys())
        Popen(cmd, shell=True) # , stdout=PIPE) --> cause stream error

        # monitor 用 worker が増えるまで待つ
        self.client.wait_for_workers(n_workers=current_n_workers+1)

        # actor の設定
        self.status = OptimizationStatus(self.client)
        self.worker_status_list = [OptimizationStatus(self.client, name) for name in worker_addresses] # tqdm 検討
        self.status.set(OptimizationStatus.SETTING_UP)
        self.history = History(self.history_path, self.client)
        self.history.init(
            self.opt.parameters['name'].to_list(),
            list(self.opt.objectives.keys()),
            list(self.opt.constraints.keys()),
        )

        # launch monitor
        self.monitor_process_future = self.client.submit(
            start_monitor_server,
            self.history,
            self.status,
            worker_addresses,
            self.worker_status_list,
            **self.monitor_server_kwargs, # kwargs
            workers=self.monitor_process_worker_name, # if invalid arg,
            allow_other_workers=False
        )

        # fem
        self.fem.setup_before_parallel(self.client)

        # opt
        self.opt.fem_class = type(self.fem)
        self.opt.fem_kwargs = self.fem.kwargs
        self.opt.entire_status = self.status
        self.opt.history = self.history
        self.opt.setup_before_parallel()

        # クラスターでの計算開始
        self.status.set(OptimizationStatus.LAUNCHING_FEM)
        start = time()
        calc_futures = self.client.map(
            self.opt._main,
            subprocess_indices,
            [self.worker_status_list]*len(subprocess_indices),
            [wait_setup]*len(subprocess_indices),
            workers=worker_addresses,
            allow_other_workers=False,
        )

        t_main = None
        if not self.opt.is_cluster:
            # ローカルプロセスでの計算(opt._main 相当の処理)
            subprocess_idx = 0

            # set_fem
            self.opt.fem = self.fem
            self.opt.set_fem(skip_reconstruct=True)

            t_main = Thread(
                target=self.opt._main,
                args=(
                    subprocess_idx,
                    self.worker_status_list,
                    wait_setup,
                ),
                kwargs=dict(
                    skip_set_fem=True,
                )
            )
            t_main.start()

        # save history
        def save_history():
            while True:
                sleep(2)
                try:
                    self.history.actor_data.to_csv(self.history.path, index=None, encoding='shift-jis')
                except PermissionError:
                    pass
                if self.status.get() == OptimizationStatus.TERMINATED:
                    break
        t_save_history = Thread(target=save_history)
        t_save_history.start()

        # 終了を待つ
        self.client.gather(calc_futures)
        if not self.opt.is_cluster: # 既存の fem を使っているならそれも待つ
            if t_main is not None:
                t_main.join()
        self.status.set(OptimizationStatus.TERMINATED)
        end = time()

        # 一応
        t_save_history.join()

        logger.info(f'計算が終了しました. 実行時間は {int(end - start)} 秒でした。ウィンドウを閉じると終了します.')
        logger.info(f'結果は{self.history.path}を確認してください.')

    def terminate_all(self):
        """Try to terminate all launched processes.

        If distributed computing, Scheduler and Workers will NOT be terminated.

        """

        # monitor が terminated 状態で少なくとも一度更新されなければ running のまま固まる
        sleep(1)

        # terminate monitor process
        self.status.set(OptimizationStatus.TERMINATE_ALL)
        logger.info(self.monitor_process_future.result())
        sleep(1)

        # terminate actors
        self.client.cancel(self.history._future, force=True)
        self.client.cancel(self.status._future, force=True)
        for worker_status in self.worker_status_list:
            self.client.cancel(worker_status._future, force=True)
        logger.info('Terminate actors.')
        sleep(1)

        # terminate monitor worker
        n_workers = len(self.client.nthreads())
        self.client.retire_workers(
            names=[self.monitor_process_worker_name],
            close_workers=True,
            remove=True,
        )
        while n_workers == len(self.client.nthreads()):
            sleep(1)
        logger.info('Terminate monitor processes worker.')
        sleep(1)

        # close scheduler, other workers(, cluster)
        self.client.close()
        while self.client.scheduler is not None:
            sleep(1)
        logger.info('Terminate client.')

        # close FEM (if specified to quit when deconstruct)
        del self.fem
        logger.info('Terminate FEM.')

        # terminate dask relative processes.
        if not self.opt.is_cluster:
            self.client.shutdown()
            logger.info('Terminate all relative processes.')
        sleep(3)


def start_monitor_server(history, status, worker_addresses, worker_status_list, host='localhost', port=8080):
    monitor = Monitor(history, status, worker_addresses, worker_status_list)
    monitor.start_server(worker_addresses, worker_status_list, host, port)
    return 'Exit monitor server process gracefully'
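For reference, here is a minimal usage sketch of the workflow the deleted module provided, written only from the method signatures shown above (FEMOpt.add_parameter / add_objective / set_random_seed / main / terminate_all). The objective body is a placeholder: with the default FemtetInterface, the Femtet COM object is passed as the first argument of each objective, and a real script would read an analysis result from it. This targets the 0.3.12 layout; the module no longer exists in 0.4.1.

# Sketch against the 0.3.12 API defined in the deleted pyfemtet/opt/base.py (removed in 0.4.1).
from pyfemtet.opt.base import FEMOpt

def my_objective(Femtet):
    # Placeholder: read and return a scalar analysis result from the Femtet COM object here.
    raise NotImplementedError

if __name__ == '__main__':
    femopt = FEMOpt()  # defaults: FemtetInterface + OptunaOptimizer, history written to '<timestamp>.csv'
    femopt.add_parameter('width', initial_value=10.0, lower_bound=5.0, upper_bound=20.0)
    femopt.add_objective(my_objective, name='result', direction='minimize')
    femopt.set_random_seed(42)
    femopt.main(n_trials=20, n_parallel=1)  # launches a local dask cluster plus the monitor server
    femopt.terminate_all()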