physbo 2.0.0__cp310-cp310-macosx_12_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- physbo/__init__.py +17 -0
- physbo/blm/__init__.py +17 -0
- physbo/blm/basis/__init__.py +8 -0
- physbo/blm/basis/fourier.py +148 -0
- physbo/blm/core/__init__.py +8 -0
- physbo/blm/core/model.py +257 -0
- physbo/blm/inf/__init__.py +8 -0
- physbo/blm/inf/exact.py +192 -0
- physbo/blm/lik/__init__.py +10 -0
- physbo/blm/lik/_src/__init__.py +8 -0
- physbo/blm/lik/_src/cov.py +113 -0
- physbo/blm/lik/gauss.py +136 -0
- physbo/blm/lik/linear.py +117 -0
- physbo/blm/predictor.py +238 -0
- physbo/blm/prior/__init__.py +8 -0
- physbo/blm/prior/gauss.py +215 -0
- physbo/gp/__init__.py +15 -0
- physbo/gp/core/__init__.py +11 -0
- physbo/gp/core/learning.py +364 -0
- physbo/gp/core/model.py +420 -0
- physbo/gp/core/prior.py +207 -0
- physbo/gp/cov/__init__.py +8 -0
- physbo/gp/cov/_src/__init__.py +1 -0
- physbo/gp/cov/_src/enhance_gauss.cpython-310-darwin.so +0 -0
- physbo/gp/cov/gauss.py +393 -0
- physbo/gp/inf/__init__.py +8 -0
- physbo/gp/inf/exact.py +231 -0
- physbo/gp/lik/__init__.py +8 -0
- physbo/gp/lik/gauss.py +179 -0
- physbo/gp/mean/__init__.py +9 -0
- physbo/gp/mean/const.py +150 -0
- physbo/gp/mean/zero.py +66 -0
- physbo/gp/predictor.py +170 -0
- physbo/misc/__init__.py +15 -0
- physbo/misc/_src/__init__.py +1 -0
- physbo/misc/_src/cholupdate.cpython-310-darwin.so +0 -0
- physbo/misc/_src/diagAB.cpython-310-darwin.so +0 -0
- physbo/misc/_src/logsumexp.cpython-310-darwin.so +0 -0
- physbo/misc/_src/traceAB.cpython-310-darwin.so +0 -0
- physbo/misc/centering.py +28 -0
- physbo/misc/gauss_elim.py +35 -0
- physbo/misc/set_config.py +299 -0
- physbo/opt/__init__.py +8 -0
- physbo/opt/adam.py +107 -0
- physbo/predictor.py +261 -0
- physbo/search/__init__.py +11 -0
- physbo/search/discrete/__init__.py +11 -0
- physbo/search/discrete/policy.py +804 -0
- physbo/search/discrete/results.py +192 -0
- physbo/search/discrete_multi/__init__.py +11 -0
- physbo/search/discrete_multi/policy.py +552 -0
- physbo/search/discrete_multi/results.py +128 -0
- physbo/search/pareto.py +206 -0
- physbo/search/score.py +155 -0
- physbo/search/score_multi.py +197 -0
- physbo/search/utility.py +101 -0
- physbo/variable.py +222 -0
- physbo-2.0.0.dist-info/METADATA +110 -0
- physbo-2.0.0.dist-info/RECORD +61 -0
- physbo-2.0.0.dist-info/WHEEL +5 -0
- physbo-2.0.0.dist-info/top_level.txt +1 -0
physbo/blm/lik/_src/cov.py
ADDED

@@ -0,0 +1,113 @@

# SPDX-License-Identifier: MPL-2.0
# Copyright (C) 2020- The University of Tokyo
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.

import numpy as np


class cov:
    """
    Covariance

    Attributes
    ==========
    params: float
        half of log of variance
    nparams: int
        number of parameters
    sigma2: float
        variance
    prec: float
        inv. of variance
    """

    def __init__(self, params=None):
        self.params = params
        if self.params is None:
            self.params = np.log(1)
        self.nparams = 1
        self.sigma2, self.prec = self._trans_params(params)

    def get_cov(self, N, params=None):
        """
        compute the covariance of prior

        Parameters
        ==========
        N: int
            dimension
        params:
            half of log of variance
            (default: self.params)

        Returns
        =======
        numpy.ndarray
            NxN covariance matrix
        """
        if params is None:
            params = self.params

        sigma2, prec = self._trans_params(params)
        return np.identity(N) * sigma2

    def get_prec(self, N, params=None):
        """
        compute the precision of prior

        Parameters
        ==========
        N: int
            dimension
        params:
            half of log of variance
            (default: self.params)

        Returns
        =======
        numpy.ndarray
            inverse of covariance matrix
        """
        if params is None:
            params = self.params
        sigma2, prec = self._trans_params(params)
        return np.identity(N) * prec

    def set_params(self, params):
        """
        set the parameter

        Parameters
        ==========
        params: float
            half of log of variance
        """
        self.params = params
        self.sigma2, self.prec = self._trans_params(params)

    def _trans_params(self, params=None):
        """
        transform the parameter into variance and precision

        Parameters
        ==========
        params: float
            half of log of variance
            (default: self.params)

        Returns
        =======
        sigma2: float
            variance
        prec: float
            precision (inv. of variance)
        """
        if params is None:
            params = np.copy(self.params)

        sigma2 = np.exp(2 * params)
        prec = 1 / sigma2
        return sigma2, prec
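The parameter above stores half of the log-variance, so sigma2 = exp(2 * params) and prec = 1 / sigma2. A minimal usage sketch (illustrative only, not part of the package diff; the example values are arbitrary):

import numpy as np
from physbo.blm.lik._src.cov import cov

params = np.log(2.0)              # half of log of variance, i.e. variance = 4.0
c = cov(params=params)
print(c.sigma2, c.prec)           # 4.0 0.25
print(np.allclose(c.get_cov(3), np.identity(3) * 4.0))    # True: 3x3 diagonal covariance
print(np.allclose(c.get_prec(3), np.identity(3) * 0.25))  # True: its inverse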
physbo/blm/lik/gauss.py
ADDED
@@ -0,0 +1,136 @@

# SPDX-License-Identifier: MPL-2.0
# Copyright (C) 2020- The University of Tokyo
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.

import numpy as np


class gauss:
    """
    Gaussian

    Attributes
    ==========
    linear
    cov: blm.lik.cov
        covariance
    stats
    """

    def __init__(self, linear, cov):
        self.linear = linear
        self.cov = cov
        self.stats = ()

    def get_cov(self, N, params=None):
        """
        Returns covariance matrix

        Parameters
        ==========
        N: int
            dimension
        params: float
            half of log of variance
            (default: self.cov.params)

        Returns
        =======
        numpy.ndarray
            NxN covariance matrix
        """
        if params is None:
            params = np.copy(self.cov.params)

        return self.cov.get_cov(N, params)

    def get_prec(self, N, params=None):
        """
        Returns precision matrix

        Parameters
        ==========
        N: int
            dimension
        params: float
            half of log of variance
            (default: self.cov.params)

        Returns
        =======
        numpy.ndarray
            NxN precision matrix
        """

        if params is None:
            params = np.copy(self.cov.params)

        return self.cov.get_cov(N, params)

    def get_basis(self, X):
        """
        calculates value of basis function at input

        Parameters
        ==========
        X: numpy.ndarray
            input

        See also
        ========
        blm.basis.fourier.get_basis
        """
        return self.linear.basis.get_basis(X)

    def get_mean(self, X, Psi=None, params=None, bias=None):
        """
        calculates mean value

        Parameters
        ==========
        X: numpy.ndarray
            raw input
        Psi: numpy.ndarray
            value of feature maps
        params: numpy.ndarray
            weight
        bias: float
            bias

        See also
        ========
        blm.basis.fourier.get_mean
        """
        return self.linear.get_mean(X, Psi, params, bias)

    def set_params(self, params):
        """
        sets parameters
        """
        self.linear.set_params(params)

    def set_bias(self, bias):
        """
        sets bias
        """
        self.linear.set_bias(bias)

    def sampling(self, fmean):
        """
        draws samples

        Parameters
        ==========
        fmean: numpy.ndarray
            means of samples

        Returns
        =======
        samples: numpy.ndarray
        """
        num_data = fmean.shape[0]
        eps = np.sqrt(self.cov.sigma2) * np.random.randn(num_data)
        return fmean + eps
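A short sketch of sampling (illustrative, not part of the diff): it adds zero-mean Gaussian noise with standard deviation sqrt(cov.sigma2) to the given mean vector. Because sampling only touches self.cov, the linear argument can be left as None for this demonstration:

import numpy as np
from physbo.blm.lik._src.cov import cov
from physbo.blm.lik.gauss import gauss

np.random.seed(0)                     # reproducible draw for the sketch
lik = gauss(linear=None, cov=cov())   # default params = log(1), so sigma2 = 1
fmean = np.zeros(5)
samples = lik.sampling(fmean)         # fmean + sqrt(sigma2) * standard normal noise
print(samples.shape)                  # (5,)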
physbo/blm/lik/linear.py
ADDED
@@ -0,0 +1,117 @@

# SPDX-License-Identifier: MPL-2.0
# Copyright (C) 2020- The University of Tokyo
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.

import numpy as np


class linear:
    """

    Attributes
    ==========
    basis:
        basis for random feature map
    nbasis: int
        number of basis
    bias:
    params:
    _init_params:
        initial value of the parameter
    """

    def __init__(self, basis, params=None, bias=None):
        self.basis = basis
        self.nbasis = basis.nbasis
        self._init_params = params
        self.bias = bias
        self.params = params

        if params is None:
            self.params = np.zeros(self.nbasis)
        self.nparams = self.nbasis

    def get_mean(self, X, Psi=None, params=None, bias=None):
        """
        calculate mean values

        Parameters
        ==========
        X: numpy.ndarray
            input as an N-by-d matrix
        Psi: numpy.ndarray
            feature maps ``Psi(X)`` as an N-by-l matrix
            (default: self.get_basis(X))
        params: numpy.ndarray
            weight as a vector with size l
            (default: self.params)
        bias: float
            (default: self.bias)

        Returns
        =======
        numpy.ndarray
            Psi * params + bias

        """
        if params is None:
            params = np.copy(self.params)

        if bias is None:
            bias = np.copy(self.bias)

        if Psi is None:
            Psi = self.get_basis(X)

        return Psi.dot(params) + bias

    def set_params(self, params):
        """
        set parameters

        Parameters
        ==========
        params: np.ndarray
        """
        self.params = params

    def set_bias(self, bias):
        """
        set bias

        Parameters
        ==========
        bias: float
        """
        self.bias = bias

    def _init_params(self, params):
        """
        initialize parameters

        Parameters
        ==========
        params: np.ndarray
            (default: numpy.zeros(self.nbasis))
        """
        if params is None:
            self.params = np.zeros(self.nbasis)

        self.params = params

    def _init_bias(self, bias):
        """
        initialize bias

        Parameters
        ==========
        bias: float
            (default: 0)
        """
        if bias is None:
            self.bias = 0

        self.bias = bias
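To see what get_mean computes, here is a sketch only (not part of the diff): _toy_basis below is a hypothetical stand-in for the real basis object (physbo.blm.basis.fourier in this package), since only its nbasis attribute is needed when Psi is supplied explicitly.

import numpy as np
from physbo.blm.lik.linear import linear

class _toy_basis:
    nbasis = 3   # hypothetical stand-in; only nbasis is used here

lin = linear(_toy_basis(), params=np.array([1.0, 0.5, -1.0]), bias=2.0)
Psi = np.array([[1.0, 0.0, 1.0],
                [0.0, 2.0, 0.0]])          # pretend feature maps for two inputs
print(lin.get_mean(X=None, Psi=Psi))       # Psi.dot(params) + bias -> [2. 3.]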
physbo/blm/predictor.py
ADDED
@@ -0,0 +1,238 @@

# SPDX-License-Identifier: MPL-2.0
# Copyright (C) 2020- The University of Tokyo
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.

import physbo.predictor


class predictor(physbo.predictor.base_predictor):
    """Predictor using Bayesian linear model

    Attributes
    ==========
    blm: physbo.blm.core.model
    config: physbo.misc.set_config
        configuration
    """

    def __init__(self, config, model=None):
        """

        Parameters
        ==========
        config: physbo.misc.set_config
            configuration
        model: physbo.gp.core.model

        See also
        ========
        physbo.base_predictor
        """
        super(predictor, self).__init__(config, model)
        self.blm = None

    def fit(self, training, num_basis=None):
        """
        fit model to training dataset

        Parameters
        ==========
        training: physbo.variable
            dataset for training
        num_basis: int
            the number of basis (default: self.config.predict.num_basis)
        """
        if num_basis is None:
            num_basis = self.config.predict.num_basis

        if self.model.prior.cov.num_dim is None:
            self.model.prior.cov.num_dim = training.X.shape[1]
        self.model.fit(training.X, training.t, self.config)
        self.blm = self.model.export_blm(num_basis)
        self.delete_stats()

    def prepare(self, training):
        """
        initializes model by using training data set

        Parameters
        ==========
        training: physbo.variable
            dataset for training
        """
        self.blm.prepare(training.X, training.t, training.Z)

    def delete_stats(self):
        """
        resets model
        """
        self.blm.stats = None

    def get_basis(self, X):
        """
        calculates feature maps Psi(X)

        Parameters
        ==========
        X: numpy.ndarray
            inputs

        Returns
        =======
        Psi: numpy.ndarray
            feature maps
        """
        return self.blm.lik.get_basis(X)

    def get_post_fmean(self, training, test):
        """
        calculates posterior mean value of model

        Parameters
        ==========
        training: physbo.variable
            training dataset. If already trained, the model does not use this.
        test: physbo.variable
            inputs

        Returns
        =======
        numpy.ndarray
        """
        if self.blm.stats is None:
            self.prepare(training)
        return self.blm.get_post_fmean(test.X, test.Z)

    def get_post_fcov(self, training, test):
        """
        calculates posterior variance-covariance matrix of model

        Parameters
        ==========
        training: physbo.variable
            training dataset. If already trained, the model does not use this.
        test: physbo.variable
            inputs

        Returns
        =======
        numpy.ndarray
        """
        if self.blm.stats is None:
            self.prepare(training)
        return self.blm.get_post_fcov(test.X, test.Z)

    def get_post_params(self, training, test):
        """
        calculates posterior weights

        Parameters
        ==========
        training: physbo.variable
            training dataset. If already trained, the model does not use this.
        test: physbo.variable
            inputs (not used)

        Returns
        =======
        numpy.ndarray
        """
        if self.blm.stats is None:
            self.prepare(training)
        return self.blm.get_post_params_mean()

    def get_post_samples(self, training, test, N=1, alpha=1.0):
        """
        draws samples of mean values of model

        Parameters
        ==========
        training: physbo.variable
            training dataset. If already trained, the model does not use this.
        test: physbo.variable
            inputs
        N: int
            number of samples
            (default: 1)
        alpha: float
            noise for sampling source
            (default: 1.0)

        Returns
        =======
        numpy.ndarray
        """
        if self.blm.stats is None:
            self.prepare(training)
        return self.blm.post_sampling(test.X, Psi=test.Z, N=N, alpha=alpha)

    def get_predict_samples(self, training, test, N=1):
        """
        draws samples of values of model

        Parameters
        ==========
        training: physbo.variable
            training dataset. If already trained, the model does not use this.
        test: physbo.variable
            inputs
        N: int
            number of samples
            (default: 1)
        alpha: float
            noise for sampling source
            (default: 1.0)

        Returns
        =======
        numpy.ndarray (N x len(test))
        """
        if self.blm.stats is None:
            self.prepare(training)
        return self.blm.predict_sampling(test.X, Psi=test.Z, N=N).transpose()

    def update(self, training, test):
        """
        updates the model.

        If not yet initialized (prepared), the model will be prepared by ``training``.
        Otherwise, the model will be updated by ``test``.

        Parameters
        ==========
        training: physbo.variable
            training dataset for initialization (preparation).
            If already prepared, the model ignores this.
        test: physbo.variable
            training data for update.
            If not prepared, the model ignores this.
        """
        if self.model.stats is None:
            self.prepare(training)
            return None

        if hasattr(test.t, "__len__"):
            N = len(test.t)
        else:
            N = 1

        if N == 1:
            if test.Z is None:
                if test.X.ndim == 1:
                    self.blm.update_stats(test.X, test.t)
                else:
                    self.blm.update_stats(test.X[0, :], test.t)
            else:
                if test.Z.ndim == 1:
                    self.blm.update_stats(test.X, test.t, psi=test.Z)
                else:
                    self.blm.update_stats(test.X[0, :], test.t, psi=test.Z[0, :])
        else:
            for n in range(N):
                if test.Z is None:
                    self.blm.update_stats(test.X[n, :], test.t[n])
                else:
                    self.blm.update_stats(test.X[n, :], test.t[n], psi=test.Z[n, :])
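For orientation, a hedged sketch of how these methods chain together. The constructors of physbo.misc.set_config and physbo.variable are not shown in this diff, so the defaults assumed below (a no-argument set_config() and variable(X=..., t=...)) are assumptions, as is the fallback to a default GP model when model=None; only the predictor calls themselves come from the code above.

import numpy as np
import physbo
from physbo.blm.predictor import predictor

# assumed constructors (not shown in this diff): set_config() with defaults,
# variable(X, t) holding inputs and targets
config = physbo.misc.set_config()
training = physbo.variable(X=np.random.rand(20, 3), t=np.random.rand(20))
test = physbo.variable(X=np.random.rand(5, 3))

pred = predictor(config)               # model=None is assumed to fall back to a default GP in base_predictor
pred.fit(training)                     # fit the GP, then export it as a Bayesian linear model
pred.prepare(training)                 # build the BLM's sufficient statistics
fmean = pred.get_post_fmean(training, test)   # posterior mean at the test points
fcov = pred.get_post_fcov(training, test)     # posterior (co)variances at the test points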
@@ -0,0 +1,8 @@

# SPDX-License-Identifier: MPL-2.0
# Copyright (C) 2020- The University of Tokyo
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.

from .gauss import gauss