gensbi_examples-0.0.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gensbi_examples/__init__.py +0 -0
- gensbi_examples/c2st.py +111 -0
- gensbi_examples/c2st_v2.py.bk +147 -0
- gensbi_examples/graph.py +211 -0
- gensbi_examples/mask.py +80 -0
- gensbi_examples/sbi_tasks.py.bk +417 -0
- gensbi_examples/tasks.py +343 -0
- gensbi_examples/utils.py +15 -0
- gensbi_examples/utils.py.bk +56 -0
- gensbi_examples-0.0.2.dist-info/METADATA +72 -0
- gensbi_examples-0.0.2.dist-info/RECORD +13 -0
- gensbi_examples-0.0.2.dist-info/WHEEL +4 -0
- gensbi_examples-0.0.2.dist-info/licenses/LICENSE +13 -0
gensbi_examples/__init__.py
File without changes

gensbi_examples/c2st.py
ADDED
@@ -0,0 +1,111 @@
from typing import Optional

import jax
from jax import numpy as jnp
from jax import Array
import numpy as np
from sklearn.model_selection import KFold, cross_val_score
from sklearn.neural_network import MLPClassifier


def c2st(
    X: Array,
    Y: Array,
    seed: int = 1,
    n_folds: int = 5,
    scoring: str = "accuracy",
    z_score: bool = True,
    noise_scale: Optional[float] = None,
) -> Array:
    """Classifier-based 2-sample test returning accuracy.

    Trains classifiers with N-fold cross-validation [1]. A scikit-learn
    MLPClassifier is used, with 2 hidden layers of 10 * dim units each,
    where dim is the dimensionality of the samples X and Y.

    Args:
        X: Sample 1.
        Y: Sample 2.
        seed: Seed for sklearn.
        n_folds: Number of folds.
        scoring: Scoring metric passed to cross_val_score.
        z_score: If True, z-score both samples using the mean and std of X.
        noise_scale: If passed, adds Gaussian noise with std noise_scale to the samples.

    References:
        [1]: https://scikit-learn.org/stable/modules/cross_validation.html
    """
    if z_score:
        X_mean = jnp.mean(X, axis=0)
        X_std = jnp.std(X, axis=0)
        X = (X - X_mean) / X_std
        Y = (Y - X_mean) / X_std

    if noise_scale is not None:
        # Independent keys so the noise added to X and Y is uncorrelated.
        key_x, key_y = jax.random.split(jax.random.PRNGKey(seed))
        X = X + noise_scale * jax.random.normal(key_x, X.shape)
        Y = Y + noise_scale * jax.random.normal(key_y, Y.shape)

    # Convert to numpy for scikit-learn.
    X = np.asarray(X)
    Y = np.asarray(Y)

    ndim = X.shape[1]

    clf = MLPClassifier(
        activation="relu",
        hidden_layer_sizes=(10 * ndim, 10 * ndim),
        max_iter=10000,
        solver="adam",
        random_state=seed,
    )

    data = np.concatenate((X, Y))
    target = np.concatenate(
        (
            np.zeros((X.shape[0],)),
            np.ones((Y.shape[0],)),
        )
    )

    shuffle = KFold(n_splits=n_folds, shuffle=True, random_state=seed)
    scores = cross_val_score(clf, data, target, cv=shuffle, scoring=scoring)

    scores = np.asarray(np.mean(scores)).astype(np.float32)
    return scores


def c2st_auc(
    X: Array,
    Y: Array,
    seed: int = 1,
    n_folds: int = 5,
    z_score: bool = True,
    noise_scale: Optional[float] = None,
) -> Array:
    """Classifier-based 2-sample test returning the ROC AUC (area under the curve).

    Same as c2st, except that it returns the ROC AUC rather than accuracy.

    Args:
        X: Sample 1.
        Y: Sample 2.
        seed: Seed for sklearn.
        n_folds: Number of folds.
        z_score: If True, z-score both samples using the mean and std of X.
        noise_scale: If passed, adds Gaussian noise with std noise_scale to the samples.

    Returns:
        Metric.
    """
    return c2st(
        X,
        Y,
        seed=seed,
        n_folds=n_folds,
        scoring="roc_auc",
        z_score=z_score,
        noise_scale=noise_scale,
    )
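A minimal usage sketch for these two functions (not part of the packaged module; the sample shapes and the mean shift are illustrative). A score near 0.5 means the classifier cannot tell the two samples apart; scores approaching 1.0 indicate clearly distinguishable distributions.

import jax
from gensbi_examples.c2st import c2st, c2st_auc

key_x, key_y = jax.random.split(jax.random.PRNGKey(0))
X = jax.random.normal(key_x, (500, 2))        # Sample 1
Y = jax.random.normal(key_y, (500, 2)) + 0.5  # Sample 2, with a shifted mean

print(c2st(X, Y))      # Cross-validated accuracy; ~0.5 means indistinguishable
print(c2st_auc(X, Y))  # Same test, scored with ROC AUC
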
gensbi_examples/c2st_v2.py.bk
ADDED

@@ -0,0 +1,147 @@
from typing import Optional

import jax
import jax.numpy as jnp
from jax import Array
import numpy as np
from sklearn.model_selection import KFold
from flax import nnx
import optax


# Define the MLP classifier using flax.nnx.
class MLP(nnx.Module):
    def __init__(self, in_dim, hidden_dim, *, rngs):
        self.seq = nnx.Sequential(
            nnx.Linear(in_dim, hidden_dim, rngs=rngs),
            nnx.relu,
            nnx.Linear(hidden_dim, hidden_dim, rngs=rngs),
            nnx.relu,
            nnx.Linear(hidden_dim, 2, rngs=rngs),
        )

    def __call__(self, x):
        return self.seq(x)


def loss_fn(model, x, y):
    logits = model(x)
    labels = jax.nn.one_hot(y, 2)
    loss = optax.softmax_cross_entropy(logits, labels).mean()
    return loss


def accuracy_fn(model, x, y):
    logits = model(x)
    preds = jnp.argmax(logits, axis=-1)
    return (preds == y).mean()


def c2st(
    X: Array,
    Y: Array,
    seed: int = 1,
    n_folds: int = 5,
    z_score: bool = True,
    noise_scale: Optional[float] = None,
) -> Array:
    """Classifier-based 2-sample test returning accuracy (using nnx for GPU training).

    Trains classifiers with N-fold cross-validation [1]. An nnx MLP is used,
    with 2 hidden layers of 10 * dim units each.

    Args:
        X: Sample 1.
        Y: Sample 2.
        seed: Seed for random number generation.
        n_folds: Number of folds.
        z_score: If True, z-score both samples using the mean and std of X.
        noise_scale: If passed, adds Gaussian noise with std noise_scale to the samples.

    References:
        [1]: https://scikit-learn.org/stable/modules/cross_validation.html
    """

    rngs = nnx.Rngs(seed)
    if z_score:
        X_mean = jnp.mean(X, axis=0)
        X_std = jnp.std(X, axis=0)
        X = (X - X_mean) / X_std
        Y = (Y - X_mean) / X_std

    if noise_scale is not None:
        # Independent keys so the noise added to X and Y is uncorrelated.
        key_x, key_y = jax.random.split(jax.random.PRNGKey(seed))
        X = X + noise_scale * jax.random.normal(key_x, X.shape)
        Y = Y + noise_scale * jax.random.normal(key_y, Y.shape)

    X = jnp.asarray(X)
    Y = jnp.asarray(Y)
    ndim = X.shape[1]

    # Prepare data and targets.
    data = jnp.concatenate([X, Y], axis=0)
    target = jnp.concatenate([
        jnp.zeros((X.shape[0],), dtype=jnp.int32),
        jnp.ones((Y.shape[0],), dtype=jnp.int32),
    ], axis=0)

    kf = KFold(n_splits=n_folds, shuffle=True, random_state=seed)
    scores = []

    for fold, (train_idx, test_idx) in enumerate(kf.split(data)):
        x_train, y_train = data[train_idx], target[train_idx]
        x_test, y_test = data[test_idx], target[test_idx]

        # Fresh model and optimizer for each fold.
        key = jax.random.PRNGKey(seed + fold)
        model = MLP(ndim, 10 * ndim, rngs=rngs)
        optimizer = nnx.Optimizer(model, optax.adam(1e-3))

        @nnx.jit
        def train_step(model, optimizer, x, y):
            loss, grads = nnx.value_and_grad(loss_fn)(model, x, y)
            optimizer.update(grads)
            return loss

        # Training loop.
        n_epochs = 100
        batch_size = min(128, x_train.shape[0])
        n_batches = int(jnp.ceil(x_train.shape[0] / batch_size))
        for epoch in range(n_epochs):
            key, subkey = jax.random.split(key)
            perm = jax.random.permutation(subkey, x_train.shape[0])
            x_train_shuffled = x_train[perm]
            y_train_shuffled = y_train[perm]
            for i in range(n_batches):
                start = i * batch_size
                end = min((i + 1) * batch_size, x_train.shape[0])
                xb = x_train_shuffled[start:end]
                yb = y_train_shuffled[start:end]
                train_step(model, optimizer, xb, yb)

        # nnx modules are updated in place by the optimizer.
        score = float(accuracy_fn(model, x_test, y_test))
        scores.append(score)

    return np.asarray(np.mean(scores), dtype=np.float32)


# def c2st_auc(
#     X: Array,
#     Y: Array,
#     seed: int = 1,
#     n_folds: int = 5,
#     z_score: bool = True,
#     noise_scale: Optional[float] = None,
# ) -> Array:
#     """Classifier-based 2-sample test returning AUC (area under the curve).
#
#     Same as c2st, except that it returns the ROC AUC rather than accuracy.
#     """
#     return c2st(
#         X,
#         Y,
#         seed=seed,
#         n_folds=n_folds,
#         scoring="roc_auc",
#         z_score=z_score,
#         noise_scale=noise_scale,
#     )
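The backup file above originally mixed several flax API generations (it referenced optimizer.target from the old flax.optim interface). For reference, a self-contained sketch of the flax.nnx training-step idiom used in the reconstruction; the module name, sizes, and toy data are illustrative, and the Optimizer calls assume the nnx API where update takes only the gradients (newer flax releases have changed this signature):

import jax
import jax.numpy as jnp
import optax
from flax import nnx

class TinyClassifier(nnx.Module):
    def __init__(self, in_dim, hidden_dim, *, rngs):
        self.l1 = nnx.Linear(in_dim, hidden_dim, rngs=rngs)
        self.l2 = nnx.Linear(hidden_dim, 2, rngs=rngs)

    def __call__(self, x):
        return self.l2(nnx.relu(self.l1(x)))

model = TinyClassifier(2, 32, rngs=nnx.Rngs(0))
optimizer = nnx.Optimizer(model, optax.adam(1e-3))

@nnx.jit
def train_step(model, optimizer, x, y):
    def loss_fn(model):
        logits = model(x)
        return optax.softmax_cross_entropy_with_integer_labels(logits, y).mean()

    loss, grads = nnx.value_and_grad(loss_fn)(model)
    optimizer.update(grads)  # Mutates the model parameters in place.
    return loss

# Toy data: the label is the sign of the first coordinate.
x = jax.random.normal(jax.random.PRNGKey(1), (128, 2))
y = (x[:, 0] > 0).astype(jnp.int32)
for _ in range(100):
    loss = train_step(model, optimizer, x, y)
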
gensbi_examples/graph.py
ADDED
@@ -0,0 +1,211 @@

import jax
import jax.numpy as jnp

from functools import partial


@jax.jit
def find_ancestors_jax(mask, node):
    """Find the ancestors of a node in a directed graph.

    Args:
        mask (Array): Adjacency matrix of a directed graph; mask[i, j] is True
            when node j is a parent of node i.
        node (int): Node of interest.

    Returns:
        Array: Boolean mask that is True for every ancestor of node.
    """
    num_nodes = mask.shape[0]
    is_ancestor = jnp.zeros(num_nodes, dtype=jnp.bool_)
    stack = jnp.empty(num_nodes, dtype=jnp.int32)
    stack = stack.at[0].set(node)

    def body_fn(carry, i):
        is_ancestor, stack = carry
        current_node = stack[i]
        current_parents = mask[current_node, :]

        def inner_body_fn(carry, j):
            is_ancestor, stack = carry
            value = current_parents[j]
            cond = value & (j != current_node) & (~is_ancestor[j])

            def true_fn(is_ancestor, stack):
                is_ancestor = is_ancestor.at[j].set(True)
                stack = stack.at[i + 1].set(j)
                return is_ancestor, stack

            def false_fn(is_ancestor, stack):
                return is_ancestor, stack

            is_ancestor, stack = jax.lax.cond(cond, true_fn, false_fn, is_ancestor, stack)
            return (is_ancestor, stack), None

        (is_ancestor, stack), _ = jax.lax.scan(inner_body_fn, (is_ancestor, stack), jnp.arange(num_nodes))
        return (is_ancestor, stack), None

    (is_ancestor, stack), _ = jax.lax.scan(body_fn, (is_ancestor, stack), jnp.arange(num_nodes))

    return is_ancestor


@partial(jax.jit, static_argnums=(2,))
def faithfull_mask(base_mask, condition_mask, conditioned_nodes="unchanged"):
    """Faithful mask update for conditioning."""

    graph = base_mask.astype(jnp.bool_).copy()
    base_mask = base_mask.astype(jnp.bool_)  # base_mask[i, j] is True when j is a parent of i.
    condition_mask = condition_mask.astype(jnp.bool_)
    num_nodes = base_mask.shape[0]

    def body_fn(carry, i):
        base_mask, condition_mask = carry

        def condition_case(base_mask, condition_mask):
            # We need to update all ancestors of i.
            is_ancestor = find_ancestors_jax(graph, i)
            is_ancestor = is_ancestor & (~condition_mask)
            all_ancestors = jnp.nonzero(is_ancestor, size=num_nodes, fill_value=i)[0]
            # They will now depend on i.
            base_mask = base_mask.at[all_ancestors, i].set(True)
            # They will now depend on each other!
            base_mask = base_mask | (is_ancestor[:, None] & is_ancestor[None, :])
            # The parents of all children of i will now depend on each other.
            children_of_i = base_mask[:, i]
            parents_of_children_of_i = base_mask & (children_of_i[:, None] & ~children_of_i[None, :])
            parents_of_children_of_i = jnp.any(parents_of_children_of_i, axis=0)

            return base_mask, condition_mask

        def uncondition_case(base_mask, condition_mask):
            return base_mask, condition_mask

        base_mask, condition_mask = jax.lax.cond(
            condition_mask[i], condition_case, uncondition_case, base_mask, condition_mask
        )

        return (base_mask, condition_mask), None

    (base_mask, condition_mask), _ = jax.lax.scan(body_fn, (base_mask, condition_mask), jnp.arange(num_nodes))

    return base_mask


@partial(jax.jit, static_argnums=(2, 3))
def min_faithfull_mask(mask, condition_mask, top_mode=0, conditioned_nodes="unchanged"):
    """Minimally faithful mask update for conditioning."""
    num_nodes = mask.shape[0]
    I = moralize(mask)
    H = jnp.zeros_like(mask, dtype=jnp.bool_)
    # 0 is child, 1 is parent.
    UPSTREAM = top_mode
    DOWNSTREAM = 1 - top_mode
    num_parents_or_childs = jnp.sum(mask & (~condition_mask[None, :] & ~condition_mask[:, None]), axis=UPSTREAM)
    S = (num_parents_or_childs == 1) & (~condition_mask)  # Frontier set
    M = jnp.zeros((num_nodes,), dtype=jnp.bool_)  # Marked nodes

    def cond_fn(val):
        S, _, _, _ = val
        return jnp.any(S)

    def body_fn(val):
        S, M, I, H = val
        # Find the node whose elimination adds the fewest edges.
        v = min_fill_heuristic(mask, I, S, M, top_mode)
        # Add edges in I between the unmarked neighbours of v in I.
        neighbours_v = I[v, :] & (~M)
        I = I | (neighbours_v[:, None] & neighbours_v[None, :])
        # Make the unmarked neighbours of v the parents of v in H.
        H = H.at[v, :].set(neighbours_v)
        # Remove v from S and mark it.
        S = S.at[v].set(False)
        M = M.at[v].set(True)

        if top_mode == 1:
            u = mask[:, v] & (~M)  # Unmarked children
            upstream_u = mask & (u[:, None] & ~u[None, :])  # Parents of unmarked children
            all_upstream_u_marked = ~jnp.any(upstream_u & ~M, axis=1)
        else:
            u = mask[v, :] & (~M)  # Unmarked parents
            upstream_u = mask & (u[None, :] & ~u[:, None])  # Children of unmarked parents
            all_upstream_u_marked = ~jnp.any(upstream_u & ~M, axis=1)

        S = S | (u & all_upstream_u_marked)
        S = S & (~condition_mask)

        return S, M, I, H

    _, _, _, H = jax.lax.while_loop(cond_fn, body_fn, (S, M, I, H))
    H = H | jnp.eye(num_nodes, dtype=jnp.bool_)
    H = jax.lax.cond(jnp.any(condition_mask), lambda x: x, lambda x: mask, H)

    # Conditioned nodes keep the unconditional edges, hence each row of H where
    # condition_mask is True should be equal to the corresponding row of mask.
    if conditioned_nodes == "unchanged":
        H = H & ~condition_mask[:, None] | mask & condition_mask[:, None]
    elif conditioned_nodes == "removed":
        H = H & ~condition_mask[:, None]
    elif conditioned_nodes == "added":
        H = H | condition_mask[:, None]

    return H


@partial(jax.jit, static_argnums=(4,))
def min_fill_heuristic(G, I, S, M, top_mode=0):
    """Min-fill heuristic for choosing the next node to eliminate."""

    # 0 is child, 1 is parent.
    UPSTREAM = top_mode
    DOWNSTREAM = 1 - top_mode

    # Number of edges that would be added if each node were eliminated.
    num_edges_added = I.sum(axis=DOWNSTREAM)
    num_edges_added = S * num_edges_added + (~S) * (I.shape[0] + 1)
    # Find the node that would add the fewest edges.
    # Additional constraint: prefer nodes with marked parents.
    min_val = jnp.min(num_edges_added)
    marked_parents = -jnp.sum(M[None, :] & G, axis=DOWNSTREAM)
    num_parents = marked_parents * (num_edges_added == min_val) + (I.shape[0] + 1) * (num_edges_added != min_val)
    # Break ties in favour of the highest-index node.
    reversed_array = num_parents[::-1]
    index = jnp.argmin(reversed_array)
    node_to_eliminate = len(reversed_array) - 1 - index

    return node_to_eliminate


@jax.jit
def moralize(adj_matrix):
    adj_matrix = adj_matrix.astype(jnp.bool_)

    # Make the graph undirected.
    undirected_graph = adj_matrix | adj_matrix.T

    # Add edges between parents that share a child.
    undirected_graph = undirected_graph | (adj_matrix.T @ adj_matrix)

    return undirected_graph


def minimally_faithfull_mask(mask, condition_mask):
    """Minimally faithful mask update for conditioning."""
    I = moralize(mask)
    H = jnp.zeros_like(mask, dtype=jnp.bool_)
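A small sanity-check sketch for the graph utilities (not part of the packaged module), assuming the adjacency convention the traversal in find_ancestors_jax implies, namely that mask[i, j] is True when node j is a parent of node i, on the chain 0 -> 1 -> 2:

import jax.numpy as jnp
from gensbi_examples.graph import find_ancestors_jax, moralize

mask = jnp.array(
    [
        [0, 0, 0],  # node 0 has no parents
        [1, 0, 0],  # node 1 has parent 0
        [0, 1, 0],  # node 2 has parent 1
    ],
    dtype=jnp.bool_,
)

print(find_ancestors_jax(mask, 2))  # [ True  True False]: nodes 0 and 1 are ancestors of 2
print(moralize(mask))               # Symmetrised adjacency with co-parent edges added
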
gensbi_examples/mask.py
ADDED
@@ -0,0 +1,80 @@
import jax
import jax.numpy as jnp
import jax.random as jrandom

from functools import partial


def sample_random_conditional_mask(
    key, num_samples, theta_dim, x_dim, alpha=1.0, beta=4.0
):
    # More likely to condition on a few nodes
    key1, key2 = jax.random.split(key, 2)
    condition_mask = jax.random.bernoulli(
        key1,
        jax.random.beta(key2, alpha, beta, shape=(num_samples, 1)),
        shape=(num_samples, theta_dim + x_dim),
    ).astype(jnp.bool_)
    all_ones_mask = jnp.all(condition_mask, axis=-1)
    # If all entries are ones, set the row to False
    condition_mask = jnp.where(all_ones_mask[..., None], False, condition_mask)
    return condition_mask


def joint_conditional_mask(key, num_samples, theta_dim, x_dim):
    return jnp.array([[False] * (theta_dim + x_dim)] * num_samples)


def posterior_conditional_mask(key, num_samples, theta_dim, x_dim):
    return jnp.array([[False] * theta_dim + [True] * x_dim] * num_samples)


def likelihood_conditional_mask(key, num_samples, theta_dim, x_dim):
    return jnp.array([[True] * theta_dim + [False] * x_dim] * num_samples)


def sample_structured_conditional_mask(
    key,
    num_samples,
    theta_dim,
    x_dim,
    p_joint=0.2,
    p_posterior=0.2,
    p_likelihood=0.2,
    p_rnd1=0.2,
    p_rnd2=0.2,
    rnd1_prob=0.3,
    rnd2_prob=0.7,
):
    # Choose per sample between joint, posterior, likelihood and two random masks.
    key1, key2, key3 = jax.random.split(key, 3)
    joint_mask = jnp.array([False] * (theta_dim + x_dim), dtype=jnp.bool_)
    posterior_mask = jnp.array([False] * theta_dim + [True] * x_dim, dtype=jnp.bool_)
    likelihood_mask = jnp.array([True] * theta_dim + [False] * x_dim, dtype=jnp.bool_)
    random1_mask = jax.random.bernoulli(key2, rnd1_prob, shape=(theta_dim + x_dim,)).astype(jnp.bool_)
    random2_mask = jax.random.bernoulli(key3, rnd2_prob, shape=(theta_dim + x_dim,)).astype(jnp.bool_)
    mask_options = jnp.stack(
        [joint_mask, posterior_mask, likelihood_mask, random1_mask, random2_mask],
        axis=0,
    )  # (5, theta_dim + x_dim)
    idx = jax.random.choice(
        key1,
        5,
        shape=(num_samples,),
        p=jnp.array([p_joint, p_posterior, p_likelihood, p_rnd1, p_rnd2]),
    )
    condition_mask = mask_options[idx]
    all_ones_mask = jnp.all(condition_mask, axis=-1)
    # If all entries are ones, set the row to False
    condition_mask = jnp.where(all_ones_mask[..., None], False, condition_mask)
    return condition_mask


def get_condition_mask_fn(name, **kwargs):
    if name.lower() == "structured_random":
        return partial(sample_structured_conditional_mask, **kwargs)
    elif name.lower() == "random":
        return partial(sample_random_conditional_mask, **kwargs)
    elif name.lower() == "joint":
        return partial(joint_conditional_mask, **kwargs)
    elif name.lower() == "posterior":
        return partial(posterior_conditional_mask, **kwargs)
    elif name.lower() == "likelihood":
        return partial(likelihood_conditional_mask, **kwargs)
    else:
        raise NotImplementedError()
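A short usage sketch for the mask helpers (not part of the packaged module; the dimensions below are arbitrary). In these masks a True entry marks a conditioned/observed variable and False marks a variable to be inferred, as the posterior and likelihood constructors above imply:

import jax
from gensbi_examples.mask import get_condition_mask_fn

key = jax.random.PRNGKey(0)

# Posterior masks: theta entries False (inferred), x entries True (observed).
posterior_fn = get_condition_mask_fn("posterior")
print(posterior_fn(key, num_samples=3, theta_dim=2, x_dim=4))

# Random masks: per-row conditioning probability drawn from Beta(alpha, beta).
random_fn = get_condition_mask_fn("random", alpha=1.0, beta=4.0)
print(random_fn(key, num_samples=3, theta_dim=2, x_dim=4))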