sgptools 1.1.7__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sgptools/__init__.py CHANGED
@@ -12,7 +12,7 @@ The library includes python code for the following:
 
  """
 
- __version__ = "1.1.7"
+ __version__ = "1.2.0"
  __author__ = 'Kalvik'
 
  from .models.core import *
sgptools/kernels/__init__.py CHANGED
@@ -1,7 +1,9 @@
  # sgptools/kernels/__init__.py
 
- """Special kernel functions in this package:
+ """Special non-stationary kernel functions in this package:
 
- - `neural_kernel`: Provides a neural spectral kernel function that uses a mixture of multilayer perceptrons
+ - `neural_kernel`: Provides the neural spectral kernel that uses a mixture of multilayer perceptrons
+ - `attentive_kernel`: Provides the attentive kernel that uses a multilayer perceptron to get a mixture of RBF kernels
+ - `neural_network`: Helper class that provides a multilayer perceptron compatible with GPFlow
 
  """
sgptools/kernels/attentive_kernel.py ADDED
@@ -0,0 +1,119 @@
+ # Copyright 2024 The SGP-Tools Contributors. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Attentive Kernel function
+ """
+
+ import numpy as np
+ import tensorflow as tf
+
+ import gpflow
+ from gpflow.config import default_float
+ float_type = default_float()
+
+ from .neural_network import NN
+
+
+ class AttentiveKernel(gpflow.kernels.Kernel):
+     """Attentive Kernel function (non-stationary kernel function).
+     Based on the implementation from this [repo](https://github.com/Weizhe-Chen/attentive_kernels)
+
+     Refer to the following papers for more details:
+         - AK: Attentive Kernel for Information Gathering [Chen et al., 2022]
+
+     Args:
+         lengthscales (List): List of lengthscales to use in the mixture components. The lengthscales are not trained.
+         amplitude (int): Initial amplitude of the kernel function
+         dim_hidden (int): Number of MLP hidden layer nodes (The NN will have two of these layers)
+         num_dim (int): Number of dimensions of the data points
+     """
+     def __init__(self,
+                  lengthscales,
+                  dim_hidden=10,
+                  amplitude=1.0,
+                  num_dim=2):
+         super().__init__()
+         with self.name_scope:
+             self.num_lengthscales = len(lengthscales)
+             self._free_amplitude = tf.Variable(amplitude,
+                                                shape=[],
+                                                trainable=True,
+                                                dtype=float_type)
+             self.lengthscales = tf.Variable(lengthscales,
+                                             shape=[self.num_lengthscales],
+                                             trainable=False,
+                                             dtype=float_type)
+
+             self.nn = NN([num_dim, dim_hidden, dim_hidden, self.num_lengthscales])
+
+     def get_representations(self, X):
+         Z = self.nn(X)
+         representations = Z / tf.norm(Z, axis=1, keepdims=True)
+         return representations
+
+     def K(self, X, X2=None):
+         """Computes the covariances between/amongst the input variables
+
+         Args:
+             X (ndarray): Variables to compute the covariance matrix
+             X2 (ndarray): If passed, the covariance between X and X2 is computed. Otherwise,
+                           the covariance between X and X is computed.
+
+         Returns:
+             cov (ndarray): covariance matrix
+         """
+         if X2 is None:
+             X2 = X
+
+         dist = cdist(X, X2)
+         repre1 = self.get_representations(X)
+         repre2 = self.get_representations(X2)
+
+         def get_mixture_component(i):
+             attention_lengthscales = tf.tensordot(repre1[:, i], repre2[:, i], axes=0)
+             cov_mat = rbf(dist, self.lengthscales[i]) * attention_lengthscales
+             return cov_mat
+
+         cov_mat = tf.map_fn(fn=get_mixture_component,
+                             elems=tf.range(self.num_lengthscales, dtype=tf.int64),
+                             fn_output_signature=dist.dtype)
+         cov_mat = tf.math.reduce_sum(cov_mat, axis=0)
+         attention_inputs = repre1 @ tf.transpose(repre2)
+         cov_mat *= self._free_amplitude * attention_inputs
+
+         return cov_mat
+
+     def K_diag(self, X):
+         return self._free_amplitude * tf.ones((X.shape[0]), dtype=X.dtype)
+
+ '''
+ Helper functions
+ '''
+ def rbf(dist, lengthscale):
+     '''
+     RBF kernel function
+     '''
+     return tf.math.exp(-0.5 * tf.math.square(dist / lengthscale))
+
+ def cdist(x, y):
+     '''
+     Calculate the pairwise euclidean distances
+     '''
+     # Calculate distance for a single row of x.
+     per_x_dist = lambda i : tf.norm(x[i:(i+1),:] - y, axis=1)
+     # Compute and stack distances for all rows of x.
+     dist = tf.map_fn(fn=per_x_dist,
+                      elems=tf.range(tf.shape(x)[0], dtype=tf.int64),
+                      fn_output_signature=x.dtype)
+     return dist
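
A hedged usage sketch for the new AttentiveKernel (not from the package docs): it assumes 2-D inputs in GPFlow's default float64 and an illustrative lengthscale grid.

    import numpy as np
    from sgptools.kernels.attentive_kernel import AttentiveKernel

    X = np.random.rand(5, 2)                            # 5 points in 2-D, float64
    kernel = AttentiveKernel(lengthscales=[0.1, 0.5, 1.0, 2.0])
    cov = kernel.K(X)                                   # (5, 5) covariance matrix
    var = kernel.K_diag(X)                              # (5,) diagonal, equal to the amplitude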
sgptools/kernels/neural_kernel.py CHANGED
@@ -15,25 +15,23 @@
  """Provides a neural spectral kernel function along with an initialization function
  """
 
+ import tensorflow as tf
  import numpy as np
  import gc
 
- import tensorflow as tf
- from tensorflow import keras
- from tensorflow.keras import layers
 
  import gpflow
  from gpflow.config import default_jitter, default_float
  from gpflow.models import SGPR
  from gpflow.models.util import data_input_to_tensor
-
- gpflow.config.set_default_float(np.float32)
  float_type = default_float()
 
+ from .neural_network import NN
+
 
  class NeuralSpectralKernel(gpflow.kernels.Kernel):
      """Neural Spectral Kernel function (non-stationary kernel function).
-     Based on the implementation from the following [repo](https://github.com/sremes/nssm-gp/tree/master?tab=readme-ov-file)
+     Based on the implementation from this [repo](https://github.com/sremes/nssm-gp/tree/master?tab=readme-ov-file)
 
      Refer to the following papers for more details:
          - Neural Non-Stationary Spectral Kernel [Remes et al., 2018]
@@ -55,12 +53,12 @@ class NeuralSpectralKernel(gpflow.kernels.Kernel):
          self.length = []
          self.var = []
          for q in range(self.Q):
-             freq = keras.Sequential([layers.Dense(hidden_sizes[i], activation='selu') for i in range(self.num_hidden)] +
-                                     [layers.Dense(input_dim, activation='softplus')])
-             length = keras.Sequential([layers.Dense(hidden_sizes[i], activation='selu') for i in range(self.num_hidden)] +
-                                       [layers.Dense(input_dim, activation='softplus')])
-             var = keras.Sequential([layers.Dense(hidden_sizes[i], activation='selu') for i in range(self.num_hidden)] +
-                                    [layers.Dense(1, activation='softplus')])
+             freq = NN([input_dim]+[hidden_sizes[i] for i in range(self.num_hidden)]+[input_dim],
+                       output_activation_fn='softplus')
+             length = NN([input_dim]+[hidden_sizes[i] for i in range(self.num_hidden)]+[input_dim],
+                         output_activation_fn='softplus')
+             var = NN([input_dim]+[hidden_sizes[i] for i in range(self.num_hidden)]+[1],
+                      output_activation_fn='softplus')
              self.freq.append(freq)
              self.length.append(length)
              self.var.append(var)
@@ -132,7 +130,7 @@ def robust_kernel(kern, shape_X):
 
  def init_neural_kernel(x, y, inducing_variable, Q, n_inits=1, hidden_sizes=None):
      """Helper function to initialize a Neural Spectral Kernel function (non-stationary kernel function).
-     Based on the implementation from the following [repo](https://github.com/sremes/nssm-gp/tree/master?tab=readme-ov-file)
+     Based on the implementation from this [repo](https://github.com/sremes/nssm-gp/tree/master?tab=readme-ov-file)
 
      Refer to the following papers for more details:
          - Neural Non-Stationary Spectral Kernel [Remes et al., 2018]
@@ -155,9 +153,9 @@ def init_neural_kernel(x, y, inducing_variable, Q, n_inits=1, hidden_sizes=None)
      for k in range(n_inits):
          # gpflow.reset_default_graph_and_session()
          k = NeuralSpectralKernel(input_dim=input_dim, Q=Q,
-                                  hidden_sizes=hidden_sizes)
+                                  hidden_sizes=hidden_sizes)
          model = SGPR((x, y), inducing_variable=inducing_variable,
-                      kernel=k)
+                      kernel=k)
          loglik = model.elbo()
          if loglik > best_loglik:
              best_loglik = loglik
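
The hunks above swap the per-component keras.Sequential stacks for the shared NN helper added in this release; roughly, for one mixture component with input_dim=2 and hidden_sizes=[32, 32] (illustrative values), the change amounts to:

    # 1.1.7: selu hidden layers + softplus output via Keras
    # freq = keras.Sequential([layers.Dense(32, activation='selu'),
    #                          layers.Dense(32, activation='selu'),
    #                          layers.Dense(2, activation='softplus')])

    # 1.2.0: the same layer sizes through the GPFlow-compatible NN helper
    freq = NN([2, 32, 32, 2], output_activation_fn='softplus')  # hidden activation defaults to 'selu'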
sgptools/kernels/neural_network.py ADDED
@@ -0,0 +1,58 @@
+ # Copyright 2024 The SGP-Tools Contributors. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Multi Layer Perceptron Model
+ """
+
+ import numpy as np
+ import tensorflow as tf
+
+ import gpflow
+ from gpflow.config import default_float
+ float_type = default_float()
+
+ def xavier(dim_in, dim_out):
+     return np.random.randn(dim_in, dim_out)*(2./(dim_in+dim_out))**0.5
+
+ class NN(gpflow.base.Module):
+     """Multi Layer Perceptron Model that is compatible with GPFlow
+
+     Args:
+         dims (List): List of each layer's size, needs input layer dimensions as well
+         activation_fn (str): Activation function for each layer
+         output_activation_fn (str): Activation function for the last layer
+     """
+     def __init__(self, dims,
+                  activation_fn='selu',
+                  output_activation_fn='softmax'):
+         super().__init__()
+         self.dims = dims
+         self.activation_fn = tf.keras.activations.get(activation_fn)
+         self.output_activation_fn = tf.keras.activations.get(output_activation_fn)
+         for i, (dim_in, dim_out) in enumerate(zip(dims[:-1], dims[1:])):
+             setattr(self, 'W_{}'.format(i), tf.Variable(xavier(dim_in, dim_out),
+                                                         dtype=float_type))
+             setattr(self, 'b_{}'.format(i), tf.Variable(np.zeros(dim_out),
+                                                         dtype=float_type))
+
+     def __call__(self, X):
+         if X is not None:
+             for i in range(len(self.dims) - 2):
+                 W = getattr(self, 'W_{}'.format(i))
+                 b = getattr(self, 'b_{}'.format(i))
+                 X = self.activation_fn(tf.matmul(X, W) + b)
+             W = getattr(self, 'W_{}'.format(i+1))
+             b = getattr(self, 'b_{}'.format(i+1))
+             X = self.output_activation_fn(tf.matmul(X, W) + b)
+         return X
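
A short sketch of the helper on its own, with illustrative layer sizes; the weights are plain tf.Variables, so GPFlow's Module machinery picks them up as trainable parameters.

    import numpy as np
    from sgptools.kernels.neural_network import NN

    net = NN([2, 10, 10, 4])              # 2-D input, two hidden layers, 4 outputs
    X = np.random.rand(8, 2)              # float64 matches GPFlow's default_float()
    Z = net(X)                            # (8, 4) tensor, softmax outputs by default
    print(len(net.trainable_variables))   # 6: three weight matrices and three biases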
sgptools/models/core/augmented_sgpr.py CHANGED
@@ -79,8 +79,9 @@ class AugmentedSGPR(SGPR):
              kernel (gpflow.kernels.Kernel): gpflow kernel function
          """
          self.likelihood.variance.assign(noise_variance)
-         self.kernel.lengthscales.assign(kernel.lengthscales)
-         self.kernel.variance.assign(kernel.variance)
+         for self_var, var in zip(self.kernel.trainable_variables,
+                                  kernel.trainable_variables):
+             self_var.assign(var)
 
      def _common_calculation(self) -> "SGPR.CommonTensors":
          """
sgptools/models/core/osgpr.py CHANGED
@@ -66,21 +66,24 @@ class OSGPR_VFE(GPModel, InternalDataTrainingLossMixin):
          Z = np.vstack((old_Z, new_Z))
          return Z
 
-     def update(self, data, inducing_variable=None):
+     def update(self, data, inducing_variable=None, update_inducing=True):
          """Configure the OSGPR to adapt to a new batch of data.
          Note: The OSGPR needs to be trained using gradient-based approaches after update.
 
          Args:
              data (tuple): (X, y) ndarrays with new batch of inputs (n, d) and labels (n, ndim)
+             inducing_variable (ndarray): (m_new, d): New initial inducing points
+             update_inducing (bool): Whether to update the inducing points
          """
          self.X, self.Y = self.data = gpflow.models.util.data_input_to_tensor(data)
          self.num_data = self.X.shape[0]
 
          # Update the inducing points
          self.Z_old.assign(self.inducing_variable.Z.numpy())
-         if inducing_variable is None:
+         if inducing_variable is None and update_inducing:
              inducing_variable = self.init_Z()
-         self.inducing_variable.Z.assign(inducing_variable)
+         if inducing_variable is not None:
+             self.inducing_variable.Z.assign(inducing_variable)
 
          # Get posterior mean and covariance for the old inducing points
          mu_old, Su_old = self.predict_f(self.Z_old, full_cov=True)
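
A hedged sketch of the new flag, assuming an already-constructed OSGPR_VFE instance (here called online_gp, a hypothetical name) and a fresh data batch (X_new, y_new):

    # Fold in the new batch but keep the current inducing points
    online_gp.update((X_new, y_new), update_inducing=False)

    # Default behaviour: re-initialize the inducing points from the new batch via init_Z()
    online_gp.update((X_new, y_new))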
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: sgptools
- Version: 1.1.7
+ Version: 1.2.0
  Summary: Software Suite for Sensor Placement and Informative Path Planning
  Home-page: https://www.itskalvik.com/sgp-tools
  Author: Kalvik
@@ -23,8 +23,17 @@ Requires-Dist: hkb_diamondsquare
  Requires-Dist: tensorflow-probability[tf]>=0.21.0
  Requires-Dist: tensorflow>=2.13.0; platform_machine != "arm64"
  Requires-Dist: tensorflow-aarch64>=2.13.0; platform_machine == "arm64"
+ Requires-Dist: tensorflow-macos>=2.13.0; platform_system == "Darwin" and platform_machine == "arm64"
  Requires-Dist: typing_extensions
  Requires-Dist: gpflow>=2.7.0
  Requires-Dist: pillow
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: description
+ Dynamic: home-page
+ Dynamic: license
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
 
  Software Suite for Sensor Placement and Informative Path Planning
@@ -1,6 +1,8 @@
- sgptools/__init__.py,sha256=dgJa5rLAj4qD6De6iK-MHAvh7CS7qJkPKrdj5zIXdgY,449
- sgptools/kernels/__init__.py,sha256=zRf4y-wJwjXKt1uOnmI5MbzCA6pRlyA7C-eagLfb3d0,190
- sgptools/kernels/neural_kernel.py,sha256=9XEjcwwi1Gwj4D5cAZwq5QdWqMaI-Vu2DKgYO58DmPg,6709
+ sgptools/__init__.py,sha256=qT2hUpWp9XL7o7EnXNf6cxwujPSocrm3i00DrF407Uk,449
+ sgptools/kernels/__init__.py,sha256=oNrHUOPJv8WriHeYoNcgfbNb3EFBXC1oitg_37Udf3c,411
+ sgptools/kernels/attentive_kernel.py,sha256=crswMpc6t1fy4590R5zwj02ow6-yTSG6o7gMk702Ozg,4434
+ sgptools/kernels/neural_kernel.py,sha256=QxFVhd3wmmaPHHE3-fNQzlsD12DxdT6jDp6le1YhuA8,6443
+ sgptools/kernels/neural_network.py,sha256=hVr31bnjyXB5h-IGmZoby81BBufqd89zw3Ok7C-C_vE,2367
  sgptools/models/__init__.py,sha256=X2lIg9kf1-2MHUswk-VW2dHHcbSLxf6_IuV7lc_kvDc,682
  sgptools/models/bo.py,sha256=sjs18oRXL-yoNiLoaaoROjaJXqfj_CwouJPe9HgzjL0,4857
  sgptools/models/cma_es.py,sha256=LjWRcUIcARcFvAHR2F8prPDmgxLzYI0kRwYXzKp3APc,4861
@@ -9,8 +11,8 @@ sgptools/models/greedy_mi.py,sha256=06CY6tm9C3iBYEG_DOuQKDmWIww9Ah0rkeJUXsCR2YU,
  sgptools/models/greedy_sgp.py,sha256=giddMbU3ohePTdLTcH4fDx-bS9upq1T_K8KUW_Ag6HI,4490
  sgptools/models/core/__init__.py,sha256=TlUdvrM0A7vSzc5IM8C2Y2kliB1ip7YLEcHHzvuw-C4,482
  sgptools/models/core/augmented_gpr.py,sha256=NuYwlggz7ho7pvW4-so3ghos5vZ8oK7nRZqvHpAt0Zk,3497
- sgptools/models/core/augmented_sgpr.py,sha256=qMP9J4AnOUx9AEZfaPhoyb3RP_2AOhOUCUY4eh7uOi0,7185
- sgptools/models/core/osgpr.py,sha256=fyIRtNGWZeRRuojQJQAxDhMCUTKlmh5mXK3iddrPC8A,12199
+ sgptools/models/core/augmented_sgpr.py,sha256=Y9CEENt--RaV611bsDMsOpWrc0bSfemlm3qXV-pSZUs,7233
+ sgptools/models/core/osgpr.py,sha256=trUwUOLX82BRY2KyMWFygBQkc2PItxGlWPXaAgOhpE4,12442
  sgptools/models/core/transformations.py,sha256=X7WEKo_lFAYB5HKnFvxFsxfz6CB-jzPfVWcx1sWe2lI,18313
  sgptools/utils/__init__.py,sha256=jgWqzSDgUbqOTFo8mkqZaTlyz44l3v2XYPJfcHYHjqM,376
  sgptools/utils/data.py,sha256=ojDq6KzBXbAl5CdpA6A6me0sg5Sah9ZTl2TpFCqgR4c,7464
@@ -18,8 +20,8 @@ sgptools/utils/gpflow.py,sha256=46-_Tl-suxvuX3Y9KI_uiixfyCWQ2T-7BUn-7hesdVM,1004
  sgptools/utils/metrics.py,sha256=tu8H129n8GuxV5fQIKLcfzPUxd7sp8zEF9qZBOZjNKo,5834
  sgptools/utils/misc.py,sha256=11nsDEU3imnrvH7ywGMiwtNBcBnJfHX3KaGxFS3eq6w,6223
  sgptools/utils/tsp.py,sha256=b1Lx1Pj-sv7siX-f0S6d25C3RtvszCl3IP4QbvBckqY,8151
- sgptools-1.1.7.dist-info/LICENSE.txt,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- sgptools-1.1.7.dist-info/METADATA,sha256=SE-g1VWus5SyoA0PUmEpgHcQ-UX1EUpVXHPSMW1yACU,944
- sgptools-1.1.7.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- sgptools-1.1.7.dist-info/top_level.txt,sha256=2NWH6uQLAOuLB9fG7o1pqf6Jvpe1_hEcuqfSqtUw3gw,9
- sgptools-1.1.7.dist-info/RECORD,,
+ sgptools-1.2.0.dist-info/LICENSE.txt,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ sgptools-1.2.0.dist-info/METADATA,sha256=PcvpJgcoDFokeQW0WmXlBMcDYk8dw77FVMLei_P8__M,1205
+ sgptools-1.2.0.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
+ sgptools-1.2.0.dist-info/top_level.txt,sha256=2NWH6uQLAOuLB9fG7o1pqf6Jvpe1_hEcuqfSqtUw3gw,9
+ sgptools-1.2.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.6.0)
+ Generator: setuptools (76.0.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 