molcraft 0.1.0a8__py3-none-any.whl → 0.1.0a9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of molcraft might be problematic; consult the package registry's advisory page for more details.

molcraft/__init__.py CHANGED
@@ -1,4 +1,4 @@
1
- __version__ = '0.1.0a8'
1
+ __version__ = '0.1.0a9'
2
2
 
3
3
  import os
4
4
  os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
molcraft/featurizers.py CHANGED
@@ -280,8 +280,6 @@ class MolGraphFeaturizer(Featurizer):
280
280
  mol.get_bond_between_atoms(atom_i, atom_j).index
281
281
  )
282
282
  edge['feature'] = bond_feature[bond_indices]
283
- if self.self_loops:
284
- edge['self_loop'] = (edge['source'] == edge['target'])
285
283
  else:
286
284
  paths = chem.get_shortest_paths(
287
285
  mol, radius=self.radius, self_loops=self.self_loops
@@ -376,7 +374,7 @@ class MolGraphFeaturizer(Featurizer):
376
374
  num_nodes = node['feature'].shape[0]
377
375
  node = _add_super_nodes(node, num_super_nodes)
378
376
  edge = _add_super_edges(
379
- edge, num_nodes, num_super_nodes, self.feature_dtype, self.index_dtype
377
+ edge, num_nodes, num_super_nodes, self.feature_dtype, self.index_dtype, self.self_loops
380
378
  )
381
379
  return node, edge
382
380
 
@@ -708,11 +706,15 @@ def _add_super_edges(
708
706
  num_super_nodes: int,
709
707
  feature_dtype: str,
710
708
  index_dtype: str,
709
+ self_loops: bool,
711
710
  ) -> dict[str, np.ndarray]:
712
711
  edge = copy.deepcopy(edge)
713
- super_node_indices = (
714
- np.repeat(np.arange(num_super_nodes), [num_nodes]) + num_nodes
715
- )
712
+
713
+ super_node_indices = np.arange(num_super_nodes) + num_nodes
714
+ if self_loops:
715
+ edge['source'] = np.concatenate([edge['source'], super_node_indices])
716
+ edge['target'] = np.concatenate([edge['target'], super_node_indices])
717
+ super_node_indices = np.repeat(super_node_indices, [num_nodes])
716
718
  node_indices = (
717
719
  np.tile(np.arange(num_nodes), [num_super_nodes])
718
720
  )
@@ -727,6 +729,8 @@ def _add_super_edges(
727
729
  if 'feature' in edge:
728
730
  num_edges = int(edge['feature'].shape[0])
729
731
  num_super_edges = int(num_super_nodes * num_nodes * 2)
732
+ if self_loops:
733
+ num_super_edges += num_super_nodes
730
734
  edge['super'] = np.asarray(
731
735
  ([False] * num_edges + [True] * num_super_edges),
732
736
  dtype=bool
@@ -741,12 +745,6 @@ def _add_super_edges(
741
745
  ]
742
746
  )
743
747
 
744
- if 'self_loop' in edge:
745
- edge['self_loop'] = np.pad(
746
- edge['self_loop'], [(0, num_nodes * num_super_nodes * 2)],
747
- constant_values=False,
748
- )
749
-
750
748
  return edge
751
749
 
752
750
 
molcraft/layers.py CHANGED
@@ -1303,17 +1303,12 @@ class NodeEmbedding(GraphLayer):
1303
1303
  dim: int = None,
1304
1304
  normalize: bool = False,
1305
1305
  embed_context: bool = False,
1306
- allow_reconstruction: bool = False,
1307
- allow_masking: bool = False,
1308
1306
  **kwargs
1309
1307
  ) -> None:
1310
1308
  super().__init__(**kwargs)
1311
1309
  self.dim = dim
1312
1310
  self._normalize = normalize
1313
1311
  self._embed_context = embed_context
1314
- self._masking_rate = None
1315
- self._allow_masking = allow_masking
1316
- self._allow_reconstruction = allow_reconstruction
1317
1312
 
1318
1313
  def build(self, spec: tensors.GraphTensor.Spec) -> None:
1319
1314
  feature_dim = spec.node['feature'].shape[-1]
@@ -1327,8 +1322,6 @@ class NodeEmbedding(GraphLayer):
1327
1322
  self._embed_context = False
1328
1323
  if self._has_super and not self._embed_context:
1329
1324
  self._super_feature = self.get_weight(shape=[self.dim], name='super_node_feature')
1330
- if self._allow_masking:
1331
- self._mask_feature = self.get_weight(shape=[self.dim], name='mask_node_feature')
1332
1325
  if self._embed_context:
1333
1326
  self._context_dense = self.get_dense(self.dim)
1334
1327
 
@@ -1342,28 +1335,18 @@ class NodeEmbedding(GraphLayer):
1342
1335
  def propagate(self, tensor: tensors.GraphTensor) -> tensors.GraphTensor:
1343
1336
  feature = self._node_dense(tensor.node['feature'])
1344
1337
 
1345
- if self._has_super:
1346
- super_feature = (0 if self._embed_context else self._super_feature)
1338
+ if self._has_super and not self._embed_context:
1347
1339
  super_mask = keras.ops.expand_dims(tensor.node['super'], 1)
1348
- feature = keras.ops.where(super_mask, super_feature, feature)
1340
+ feature = keras.ops.where(super_mask, self._super_feature, feature)
1349
1341
 
1350
1342
  if self._embed_context:
1351
1343
  context_feature = self._context_dense(tensor.context['feature'])
1352
1344
  feature = ops.scatter_update(feature, tensor.node['super'], context_feature)
1353
1345
  tensor = tensor.update({'context': {'feature': None}})
1354
1346
 
1355
- apply_mask = (self._allow_masking and 'mask' in tensor.node)
1356
- if apply_mask:
1357
- mask = keras.ops.expand_dims(tensor.node['mask'], -1)
1358
- feature = keras.ops.where(mask, self._mask_feature, feature)
1359
- elif self._allow_masking:
1360
- feature = feature + (self._mask_feature * 0.0)
1361
-
1362
1347
  feature = self._norm(feature)
1363
1348
 
1364
- if not self._allow_reconstruction:
1365
- return tensor.update({'node': {'feature': feature}})
1366
- return tensor.update({'node': {'feature': feature, 'target_feature': feature}})
1349
+ return tensor.update({'node': {'feature': feature}})
1367
1350
 
1368
1351
  def get_config(self) -> dict:
1369
1352
  config = super().get_config()
@@ -1371,8 +1354,6 @@ class NodeEmbedding(GraphLayer):
1371
1354
  'dim': self.dim,
1372
1355
  'normalize': self._normalize,
1373
1356
  'embed_context': self._embed_context,
1374
- 'allow_masking': self._allow_masking,
1375
- 'allow_reconstruction': self._allow_reconstruction,
1376
1357
  })
1377
1358
  return config
1378
1359
 
@@ -1389,39 +1370,30 @@ class EdgeEmbedding(GraphLayer):
1389
1370
  self,
1390
1371
  dim: int = None,
1391
1372
  normalize: bool = False,
1392
- allow_masking: bool = True,
1393
1373
  **kwargs
1394
1374
  ) -> None:
1395
1375
  super().__init__(**kwargs)
1396
1376
  self.dim = dim
1397
1377
  self._normalize = normalize
1398
- self._masking_rate = None
1399
- self._allow_masking = allow_masking
1400
1378
 
1401
1379
  def build(self, spec: tensors.GraphTensor.Spec) -> None:
1402
1380
  feature_dim = spec.edge['feature'].shape[-1]
1403
1381
  if not self.dim:
1404
1382
  self.dim = feature_dim
1405
- self._edge_dense = self.get_dense(self.dim)
1383
+ self._edge_dense = self.get_dense(self.dim)
1384
+
1385
+ self._self_loop_feature = self.get_weight(shape=[self.dim], name='self_loop_edge_feature')
1406
1386
 
1407
1387
  self._has_super = 'super' in spec.edge
1408
- self._has_self_loop = 'self_loop' in spec.edge
1409
1388
  if self._has_super:
1410
1389
  self._super_feature = self.get_weight(shape=[self.dim], name='super_edge_feature')
1411
- if self._has_self_loop:
1412
- self._self_loop_feature = self.get_weight(shape=[self.dim], name='self_loop_edge_feature')
1413
- if self._allow_masking:
1414
- self._mask_feature = self.get_weight(shape=[self.dim], name='mask_edge_feature')
1415
-
1416
- if self._normalize:
1417
- if str(self._normalize).lower().startswith('batch'):
1418
- self._norm = keras.layers.BatchNormalization(
1419
- name='output_batch_norm'
1420
- )
1421
- else:
1422
- self._norm = keras.layers.LayerNormalization(
1423
- name='output_layer_norm'
1424
- )
1390
+
1391
+ if not self._normalize:
1392
+ self._norm = keras.layers.Identity()
1393
+ elif str(self._normalize).lower().startswith('layer'):
1394
+ self._norm = keras.layers.LayerNormalization()
1395
+ else:
1396
+ self._norm = keras.layers.BatchNormalization()
1425
1397
 
1426
1398
  def propagate(self, tensor: tensors.GraphTensor) -> tensors.GraphTensor:
1427
1399
  feature = self._edge_dense(tensor.edge['feature'])
@@ -1430,51 +1402,18 @@ class EdgeEmbedding(GraphLayer):
1430
1402
  super_mask = keras.ops.expand_dims(tensor.edge['super'], 1)
1431
1403
  feature = keras.ops.where(super_mask, self._super_feature, feature)
1432
1404
 
1433
- if self._has_self_loop:
1434
- self_loop_mask = keras.ops.expand_dims(tensor.edge['self_loop'], 1)
1435
- feature = keras.ops.where(self_loop_mask, self._self_loop_feature, feature)
1436
-
1437
- if (
1438
- self._allow_masking and
1439
- self._masking_rate is not None and
1440
- self._masking_rate > 0
1441
- ):
1442
- random = keras.random.uniform(shape=[tensor.num_edges])
1443
- mask = random <= self._masking_rate
1444
- if self._has_super:
1445
- mask = keras.ops.logical_and(
1446
- mask, keras.ops.logical_not(tensor.edge['super'])
1447
- )
1448
- mask = keras.ops.expand_dims(mask, -1)
1449
- feature = keras.ops.where(mask, self._mask_feature, feature)
1450
- elif self._allow_masking:
1451
- # Simply added to silence warning ('no gradients for variables ...')
1452
- feature += (0.0 * self._mask_feature)
1453
-
1454
- if self._normalize:
1455
- feature = self._norm(feature)
1405
+ self_loop_mask = keras.ops.expand_dims(tensor.edge['source'] == tensor.edge['target'], 1)
1406
+ feature = keras.ops.where(self_loop_mask, self._self_loop_feature, feature)
1456
1407
 
1457
- return tensor.update({'edge': {'feature': feature, 'embedding': feature}})
1408
+ feature = self._norm(feature)
1458
1409
 
1459
- @property
1460
- def masking_rate(self):
1461
- return self._masking_rate
1462
-
1463
- @masking_rate.setter
1464
- def masking_rate(self, rate: float):
1465
- if not self._allow_masking and rate is not None:
1466
- raise ValueError(
1467
- f'Cannot set `masking_rate` for layer {self} '
1468
- 'as `allow_masking` was set to `False`.'
1469
- )
1470
- self._masking_rate = float(rate)
1410
+ return tensor.update({'edge': {'feature': feature}})
1471
1411
 
1472
1412
  def get_config(self) -> dict:
1473
1413
  config = super().get_config()
1474
1414
  config.update({
1475
1415
  'dim': self.dim,
1476
1416
  'normalize': self._normalize,
1477
- 'allow_masking': self._allow_masking
1478
1417
  })
1479
1418
  return config
1480
1419
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: molcraft
3
- Version: 0.1.0a8
3
+ Version: 0.1.0a9
4
4
  Summary: Graph Neural Networks for Molecular Machine Learning
5
5
  Author-email: Alexander Kensert <alexander.kensert@gmail.com>
6
6
  License: MIT License
@@ -1,19 +1,19 @@
1
- molcraft/__init__.py,sha256=s8dUh6Fjq34j2aNgF13Y2NUkDwBWmsOAuIJVgY3gwCE,463
1
+ molcraft/__init__.py,sha256=8f1z8Lhuhh8TxB-QGHI5w4a3M_ZZNH8EWGD4Y6pB578,463
2
2
  molcraft/callbacks.py,sha256=x5HnkZhqcFRrW6xdApt_jZ4X08A-0fxcnFKfdmRKa0c,3571
3
3
  molcraft/chem.py,sha256=zHH7iX0ZJ7QmP-YqR_IXCpylTwCXHXptWf1DsblnZR4,21496
4
4
  molcraft/conformers.py,sha256=K6ZtiSUNDN_fwqGP9JrPcwALLFFvlMlF_XejEJH3Sr4,4205
5
5
  molcraft/datasets.py,sha256=rFgXTC1ZheLhfgQgcCspP_wEE54a33PIneH7OplbS-8,4047
6
6
  molcraft/descriptors.py,sha256=gKqlJ3BqJLTeR2ft8isftSEaJDC8cv64eTq5IYhy4XM,3032
7
7
  molcraft/features.py,sha256=aBYxDfQqQsVuyjKaPUlwEgvCjbNZ-FJhuKo2Cg5ajrA,13554
8
- molcraft/featurizers.py,sha256=qNmXSOAeplICN3j-nzvWACVuKoJ_ZBzhYP9LterKVH8,27042
9
- molcraft/layers.py,sha256=KKaH58zuov5aARj72BS_xK3ZQEwSFJrIPkoXQAAcqz8,62285
8
+ molcraft/featurizers.py,sha256=ybJ1djH747cgsftztWHxAX2iTq6k03MYr17btQ2Gtcs,27063
9
+ molcraft/layers.py,sha256=r6hEAyJxO_Yrw5hD1r2v8yb_UxLRK9S4FMjDCUQedH8,59655
10
10
  molcraft/losses.py,sha256=JEKZEX2f8vDgky_fUocsF8vZjy9VMzRjZUBa20Uf9Qw,1065
11
11
  molcraft/models.py,sha256=FLXpO3OUmRxLmxG3MjBK4ZwcVFlea1gqEgs1ibKly2w,23263
12
12
  molcraft/ops.py,sha256=dLIUq-KG8nOzEcphJqNbF_f82VZRDNrB1UKrcPt5JNM,4752
13
13
  molcraft/records.py,sha256=0sjOdcr266ZER4F-aTBQ3AVPNAwflKWNiNJVsSc1-PQ,5370
14
14
  molcraft/tensors.py,sha256=EOUKx496KUZsjA1zA2ABc7tU_TW3Jv7AXDsug_QsLbA,22407
15
- molcraft-0.1.0a8.dist-info/licenses/LICENSE,sha256=sbVeqlrtZ0V63uYhZGL5dCxUm8rBAOqe2avyA1zIQNk,1074
16
- molcraft-0.1.0a8.dist-info/METADATA,sha256=CtHK0DVlQECWUdlhg0KzvvpPyUD150BSyfzkdNF3fT8,4062
17
- molcraft-0.1.0a8.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
18
- molcraft-0.1.0a8.dist-info/top_level.txt,sha256=dENV6MfOceshM6MQCgJlcN1ojZkiCL9B4F7XyUge3QM,9
19
- molcraft-0.1.0a8.dist-info/RECORD,,
15
+ molcraft-0.1.0a9.dist-info/licenses/LICENSE,sha256=sbVeqlrtZ0V63uYhZGL5dCxUm8rBAOqe2avyA1zIQNk,1074
16
+ molcraft-0.1.0a9.dist-info/METADATA,sha256=HiwS2wmntCA7m_YpgSWKiJTP0BFpl4GWWz4a77w1XBw,4062
17
+ molcraft-0.1.0a9.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
18
+ molcraft-0.1.0a9.dist-info/top_level.txt,sha256=dENV6MfOceshM6MQCgJlcN1ojZkiCL9B4F7XyUge3QM,9
19
+ molcraft-0.1.0a9.dist-info/RECORD,,