pyg-nightly 2.7.0.dev20250222__py3-none-any.whl → 2.7.0.dev20250223__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyg-nightly
-Version: 2.7.0.dev20250222
+Version: 2.7.0.dev20250223
 Summary: Graph Neural Network Library for PyTorch
 Keywords: deep-learning,pytorch,geometric-deep-learning,graph-neural-networks,graph-convolutional-networks
 Author-email: Matthias Fey <matthias@pyg.org>
@@ -1,4 +1,4 @@
-torch_geometric/__init__.py,sha256=P_eeFAUMVSeYfsS1X62TDRCGKklzvWG3UW2-KJcsFAo,1978
+torch_geometric/__init__.py,sha256=u79QBiX3vYzq10QGHDb7To3rzMR1UXXopOn8ptXDZ0A,1978
 torch_geometric/_compile.py,sha256=f-WQeH4VLi5Hn9lrgztFUCSrN_FImjhQa6BxFzcYC38,1338
 torch_geometric/_onnx.py,sha256=V9ffrIKSqhDw6xUZ12lkuSfNs48cQp2EeJ6Z19GfnVw,349
 torch_geometric/backend.py,sha256=lVaf7aLoVaB3M-UcByUJ1G4T4FOK6LXAg0CF4W3E8jo,1575
@@ -9,7 +9,7 @@ torch_geometric/deprecation.py,sha256=dWRymDIUkUVI2MeEmBG5WF4R6jObZeseSBV9G6FNfj
 torch_geometric/device.py,sha256=tU5-_lBNVbVHl_kUmWPwiG5mQ1pyapwMF4JkmtNN3MM,1224
 torch_geometric/edge_index.py,sha256=BsLh5tOZRjjSYDkjqOFAdBuvMaDg7EWaaLELYsUL0Z8,70048
 torch_geometric/experimental.py,sha256=JbtNNEXjFGI8hZ9raM6-qrZURP6Z5nlDK8QicZUIbz0,4756
-torch_geometric/hash_tensor.py,sha256=AlPwX3spNoJ4-gHLlLY9_beETe7eTbtYtY33tKOJs1g,14503
+torch_geometric/hash_tensor.py,sha256=9Zg1KCebfN-xJE1dX2nGGYnK09snSyJkjaYVzCUOfkM,17278
 torch_geometric/home.py,sha256=EV54B4Dmiv61GDbkCwtCfWGWJ4eFGwZ8s3KOgGjwYgY,790
 torch_geometric/index.py,sha256=9ChzWFCwj2slNcVBOgfV-wQn-KscJe_y7502w-Vf76w,24045
 torch_geometric/inspector.py,sha256=nKi5o4Mn6xsG0Ex1GudTEQt_EqnF9mcMqGtp7Shh9sQ,19336
@@ -513,10 +513,10 @@ torch_geometric/sampler/base.py,sha256=kT6hYM6losYta3pqLQlqiqboJiujLy6RlH8qM--U_
 torch_geometric/sampler/hgt_sampler.py,sha256=UAm8_wwzEcziKDJ8-TnfZh1705dXRsy_I5PKhZSDTK8,2721
 torch_geometric/sampler/neighbor_sampler.py,sha256=MAVphWqNf0-cwlHRvdiU8de86dBxwjm3Miam_6s1ep4,33971
 torch_geometric/sampler/utils.py,sha256=RJtasO6Q7Pp3oYEOWrbf2DEYuSfuKZOsF2I7-eJDnoA,5485
-torch_geometric/testing/__init__.py,sha256=QUTeYNkmibxFu08AlZGzAnMHfEoBp2kt9o65k0wmfmU,1249
+torch_geometric/testing/__init__.py,sha256=0mAGVWRrTBNsGV2YUkCu_FkyQ8JIcrYVw2LsdKgY9ak,1291
 torch_geometric/testing/asserts.py,sha256=DLC9HnBgFWuTIiQs2OalsQcXGhOVG-e6R99IWhkO32c,4606
 torch_geometric/testing/data.py,sha256=O1qo8FyNxt6RGf63Ys3eXBfa5RvYydeZLk74szrez3c,2604
-torch_geometric/testing/decorators.py,sha256=b0Xqpu-qdiElGo0cFG8cSu-Pqgce7NH8xcoI0NigWiM,8309
+torch_geometric/testing/decorators.py,sha256=j45wlxMB1-Pn3wPKBgDziqg6KkWJUb_fcwfUXzkL2mM,8677
 torch_geometric/testing/distributed.py,sha256=ZZCCXqiQC4-m1ExSjDZhS_a1qPXnHEwhJGTmACxNnVI,2227
 torch_geometric/testing/feature_store.py,sha256=J6JBIt2XK-t8yG8B4JzXp-aJcVl5jaCS1m2H7d6OUxs,2158
 torch_geometric/testing/graph_store.py,sha256=00B7QToCIspYmgN7svQKp1iU-qAzEtrt3VQRFxkHfuk,1044
@@ -633,7 +633,7 @@ torch_geometric/utils/undirected.py,sha256=H_nfpI0_WluOG6VfjPyldvcjL4w5USAKWu2x5
 torch_geometric/visualization/__init__.py,sha256=PyR_4K5SafsJrBr6qWrkjKr6GBL1b7FtZybyXCDEVwY,154
 torch_geometric/visualization/graph.py,sha256=ZuLPL92yGRi7lxlqsUPwL_EVVXF7P2kMcveTtW79vpA,4784
 torch_geometric/visualization/influence.py,sha256=CWMvuNA_Nf1sfbJmQgn58yS4OFpeKXeZPe7kEuvkUBw,477
-pyg_nightly-2.7.0.dev20250222.dist-info/licenses/LICENSE,sha256=ic-27cMJc1kWoMEYncz3Ya3Ur2Bi3bNLWib2DT763-o,1067
-pyg_nightly-2.7.0.dev20250222.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
-pyg_nightly-2.7.0.dev20250222.dist-info/METADATA,sha256=G_VhMGb5Inx-4RejhUEhgXEGyFipKayNtCZpJJyiTyA,63021
-pyg_nightly-2.7.0.dev20250222.dist-info/RECORD,,
+pyg_nightly-2.7.0.dev20250223.dist-info/licenses/LICENSE,sha256=ic-27cMJc1kWoMEYncz3Ya3Ur2Bi3bNLWib2DT763-o,1067
+pyg_nightly-2.7.0.dev20250223.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+pyg_nightly-2.7.0.dev20250223.dist-info/METADATA,sha256=o3vW1MbKajweST33mDeCk-b1CKb5wGegFomLfUE_rOQ,63021
+pyg_nightly-2.7.0.dev20250223.dist-info/RECORD,,
@@ -31,7 +31,7 @@ from .lazy_loader import LazyLoader
 contrib = LazyLoader('contrib', globals(), 'torch_geometric.contrib')
 graphgym = LazyLoader('graphgym', globals(), 'torch_geometric.graphgym')
 
-__version__ = '2.7.0.dev20250222'
+__version__ = '2.7.0.dev20250223'
 
 __all__ = [
     'Index',
@@ -209,6 +209,20 @@ class HashTensor(Tensor):
 
         return out
 
+    @property
+    def _key(self) -> Tensor:
+        if isinstance(self._map, Tensor):
+            mask = self._map >= 0
+            key = mask.nonzero().view(-1) - 1
+            key = key[self._map[mask]]
+        elif (torch_geometric.typing.WITH_CUDA_HASH_MAP
+              or torch_geometric.typing.WITH_CPU_HASH_MAP):
+            key = self._map.keys().to(self.device)
+        else:
+            key = torch.from_numpy(self._map.categories.to_numpy())
+
+        return key.to(self.device)
+
     def _shallow_copy(self) -> 'HashTensor':
         return self._from_data(
             self._map,
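
The new `_key` property reconstructs the key tensor from whichever backing map is in use (a dense tensor, a pyg-lib CPU/CUDA hash map, or a pandas categorical). A minimal sketch of how it might be exercised, assuming `HashTensor` is importable from the top-level package and accepts a key/value pair, as the slicing code further down does via `tensor.__class__(key, value)`:

import torch
from torch_geometric import HashTensor  # import path is an assumption

key = torch.tensor([10, 30, 20])
value = torch.rand(3, 8)
t = HashTensor(key, value)
# `_key` is expected to recover the stored keys on the tensor's device:
print(t._key)
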
@@ -239,12 +253,16 @@ class HashTensor(Tensor):
             return index.to(self.dtype)
 
         out = self._value[index]
+
         mask = index != -1
         mask = mask.view([-1] + [1] * (out.dim() - 1))
-        if out.is_floating_point():
-            return out.where(mask, float('NaN'))
+        fill_value = float('NaN') if out.is_floating_point() else -1
+        if torch_geometric.typing.WITH_PT20:
+            other: Union[int, float, Tensor] = fill_value
         else:
-            return out.where(mask, -1)
+            other = torch.full_like(out, fill_value)
+
+        return out.where(mask, other)
 
     # Methods #################################################################
 
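
The rewritten fill logic keeps the old semantics (NaN for floating-point values, -1 otherwise) but materializes the fill via `torch.full_like` on PyTorch versions before 2.0, where `Tensor.where` may not accept a Python scalar as `other`. A hedged sketch of the observable behavior, assuming lookups of missing keys reach `_get` through the monkey-patched `torch.index_select`:

import torch
from torch_geometric import HashTensor  # import path is an assumption

t = HashTensor(torch.tensor([10, 20]), torch.tensor([1.0, 2.0]))
out = torch.index_select(t, 0, torch.tensor([10, 99]))  # 99 is not a key
print(out)  # expected: tensor([1., nan]) -- NaN marks the missing key
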
@@ -281,9 +299,23 @@ class HashTensor(Tensor):
                                       kwargs)
         return func(*args, **(kwargs or {}))
 
-    def index_select(self, dim: int, index: Any) -> Tensor:  # type: ignore
+    def tolist(self) -> List[Any]:
+        return self.as_tensor().tolist()
+
+    def index_select(  # type: ignore
+        self,
+        dim: int,
+        index: Any,
+    ) -> Union['HashTensor', Tensor]:
         return torch.index_select(self, dim, index)
 
+    def select(  # type: ignore
+        self,
+        dim: int,
+        index: Any,
+    ) -> Union['HashTensor', Tensor]:
+        return torch.select(self, dim, index)
+
 
 @implements(aten.alias.default)
 def _alias(tensor: HashTensor) -> HashTensor:
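
Together with the monkey patches further down, these overrides keep key-based semantics for `index_select` and `select` along the key dimension, and `tolist` simply round-trips through the dense value tensor. A usage sketch under the same constructor assumption as above:

import torch
from torch_geometric import HashTensor  # import path is an assumption

t = HashTensor(torch.tensor([10, 20, 30]), torch.tensor([1.0, 2.0, 3.0]))
print(t.tolist())                             # [1.0, 2.0, 3.0]
print(t.index_select(0, torch.tensor([30])))  # select by key, not position
print(t.select(0, 20))                        # expected: tensor(2.)
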
@@ -359,9 +391,13 @@ def _squeeze_default(tensor: HashTensor) -> HashTensor:
     if tensor._value is None:
         return tensor._shallow_copy()
 
+    value = tensor.as_tensor()
+    for d in range(tensor.dim() - 1, 0, -1):
+        value = value.squeeze(d)
+
     return tensor._from_data(
         tensor._map,
-        aten.squeeze.dims(tensor._value, list(range(1, tensor.dim()))),
+        value,
         tensor._min_key,
         tensor._max_key,
         num_keys=tensor.size(0),
@@ -387,11 +423,14 @@ def _squeeze_dim(
     if tensor._value is None:
         return tensor._shallow_copy()
 
-    dim = [d for d in dim if d != 0 and d != -tensor.dim()]
+    value = tensor.as_tensor()
+    for d in dim[::-1]:
+        if d != 0 and d != -tensor.dim():
+            value = value.squeeze(d)
 
     return tensor._from_data(
         tensor._map,
-        aten.squeeze.dims(tensor._value, dim),
+        value,
         tensor._min_key,
         tensor._max_key,
         num_keys=tensor.size(0),
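
Both squeeze overrides now build the squeezed value through repeated `Tensor.squeeze(d)` calls from the back, presumably for compatibility with PyTorch versions that lack the `aten.squeeze.dims` overload; in either variant the key dimension (dim 0) is always preserved. A hedged sketch, assuming the method call routes to the override shown above:

import torch
from torch_geometric import HashTensor  # import path is an assumption

t = HashTensor(torch.tensor([10, 20]), torch.rand(2, 1, 4, 1))
# Only the trailing size-1 dims are squeezed; dim 0 stays the key dim:
print(t.squeeze().shape)  # expected: torch.Size([2, 4])
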
@@ -406,10 +445,18 @@ def _slice(
     start: Optional[int] = None,
     end: Optional[int] = None,
     step: int = 1,
-) -> Union[HashTensor, Tensor]:
+) -> HashTensor:
 
     if dim == 0 or dim == -tensor.dim():
-        return aten.slice.Tensor(tensor.as_tensor(), dim, start, end, step)
+        copy = start is None or (start == 0 or start <= -tensor.size(0))
+        copy &= end is None or end > tensor.size(0)
+        copy &= step == 1
+        if copy:
+            return tensor._shallow_copy()
+
+        key = aten.slice.Tensor(tensor._key, 0, start, end, step)
+        value = aten.slice.Tensor(tensor.as_tensor(), 0, start, end, step)
+        return tensor.__class__(key, value)
 
     return tensor._from_data(
         tensor._map,
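
Slicing along the key dimension previously decayed to a plain tensor; it now stays a `HashTensor`: a slice that covers all rows with `step == 1` returns a shallow copy, while any narrower slice re-hashes the sliced keys and values through the two-argument constructor. A hedged sketch, assuming basic `t[start:end]` indexing routes to `aten.slice.Tensor`:

import torch
from torch_geometric import HashTensor  # import path is an assumption

t = HashTensor(torch.tensor([10, 20, 30]), torch.rand(3, 4))
full = t[:]    # covers all rows with step 1 -> shallow copy
part = t[1:3]  # rebuilt from the sliced keys [20, 30] and their values
print(type(full).__name__, type(part).__name__)  # both: HashTensor
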
@@ -429,7 +476,7 @@ _old_index_select = torch.index_select
 def _new_index_select(
     input: Tensor,
     dim: int,
-    index: Any,
+    index: Tensor,
     *,
     out: Optional[Tensor] = None,
 ) -> Tensor:
@@ -467,3 +514,54 @@ def _index_select(
         num_keys=tensor.size(0),
         dtype=tensor.dtype,
     )
+
+
+# Since PyTorch only allows integers as indices in `select`, we need to
+# create a wrapper function and monkey patch `select` :(
+_old_select = torch.select
+
+
+def _new_select(
+    input: Tensor,
+    dim: int,
+    index: int,
+) -> Tensor:
+
+    if dim < -input.dim() or dim >= input.dim():
+        raise IndexError(f"Dimension out of range (expected to be in range of "
+                         f"[{-input.dim()}, {input.dim()-1}], but got {dim})")
+
+    # We convert any index in the first dimension into an integer. This means
+    # that downstream handling (i.e. in `aten.select.int`) needs to take this
+    # pre-conversion into account.
+    if isinstance(input, HashTensor) and (dim == 0 or dim == -input.dim()):
+        index = int(as_key_tensor([index]))
+    return _old_select(input, dim, index)
+
+
+torch.select = _new_select  # type: ignore
+
+
+@implements(aten.select.int)
+def _select(
+    tensor: HashTensor,
+    dim: int,
+    index: int,
+) -> Union[HashTensor, Tensor]:
+
+    if dim == 0 or dim == -tensor.dim():
+        key = torch.tensor(
+            [index],
+            dtype=tensor._min_key.dtype,
+            device=tensor._min_key.device,
+        )
+        return tensor._get(key).squeeze(0)
+
+    return tensor._from_data(
+        tensor._map,
+        aten.select.int(tensor.as_tensor(), dim, index),
+        tensor._min_key,
+        tensor._max_key,
+        num_keys=tensor.size(0),
+        dtype=tensor.dtype,
+    )
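
The wrapper mirrors the existing `index_select` patch: keys passed to `torch.select` are first converted to integers via `as_key_tensor`, and `aten.select.int` then resolves them through `_get`, so missing keys pick up the fill values introduced above. A hedged sketch:

import torch
from torch_geometric import HashTensor  # import path is an assumption

t = HashTensor(torch.tensor([10, 20]), torch.tensor([1.0, 2.0]))
print(torch.select(t, 0, 20))  # expected: tensor(2.)
print(torch.select(t, 0, 99))  # missing key -> expected: tensor(nan)
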
@@ -22,6 +22,7 @@ from .decorators import (
     withDevice,
     withCUDA,
     withMETIS,
+    withHashTensor,
     disableExtensions,
     withoutExtensions,
 )
@@ -53,6 +54,7 @@ __all__ = [
     'withDevice',
     'withCUDA',
     'withMETIS',
+    'withHashTensor',
     'disableExtensions',
     'withoutExtensions',
     'assert_module',
@@ -10,6 +10,7 @@ from packaging.requirements import Requirement
 from packaging.version import Version
 
 import torch_geometric
+import torch_geometric.typing
 from torch_geometric.typing import WITH_METIS, WITH_PYG_LIB, WITH_TORCH_SPARSE
 from torch_geometric.visualization.graph import has_graphviz
 
@@ -265,6 +266,17 @@ def withMETIS(func: Callable) -> Callable:
     )(func)
 
 
+def withHashTensor(func: Callable) -> Callable:
+    r"""A decorator to only test in case :class:`HashTensor` is available."""
+    import pytest
+
+    return pytest.mark.skipif(
+        not torch_geometric.typing.WITH_CPU_HASH_MAP
+        and not has_package('pandas'),
+        reason="HashTensor dependencies not available",
+    )(func)
+
+
 def disableExtensions(func: Callable) -> Callable:
     r"""A decorator to temporarily disable the usage of the
     :obj:`torch_scatter`, :obj:`torch_sparse` and :obj:`pyg_lib` extension
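
The decorator follows the same pattern as `withMETIS` above: it skips a test unless one of the `HashTensor` backends (the pyg-lib CPU hash map or pandas) is available. A usage sketch; the test name and body are illustrative only:

import torch
from torch_geometric.testing import withHashTensor

@withHashTensor
def test_hash_tensor_tolist():  # hypothetical test
    from torch_geometric import HashTensor  # import path is an assumption
    t = HashTensor(torch.tensor([10, 20]), torch.tensor([1.0, 2.0]))
    assert t.tolist() == [1.0, 2.0]
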