pyg-nightly 2.7.0.dev20250224__py3-none-any.whl → 2.7.0.dev20250225__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pyg-nightly
- Version: 2.7.0.dev20250224
+ Version: 2.7.0.dev20250225
  Summary: Graph Neural Network Library for PyTorch
  Keywords: deep-learning,pytorch,geometric-deep-learning,graph-neural-networks,graph-convolutional-networks
  Author-email: Matthias Fey <matthias@pyg.org>
@@ -1,4 +1,4 @@
- torch_geometric/__init__.py,sha256=nc0R-kBpZZJ_9mHk-xQJhyukW9OMLPN9EmnGrJIwu1c,1978
+ torch_geometric/__init__.py,sha256=2_FzSuy3z_Dpnuncdqy_FCcZmG9u9595U4yzJS-1EiY,1978
  torch_geometric/_compile.py,sha256=f-WQeH4VLi5Hn9lrgztFUCSrN_FImjhQa6BxFzcYC38,1338
  torch_geometric/_onnx.py,sha256=V9ffrIKSqhDw6xUZ12lkuSfNs48cQp2EeJ6Z19GfnVw,349
  torch_geometric/backend.py,sha256=lVaf7aLoVaB3M-UcByUJ1G4T4FOK6LXAg0CF4W3E8jo,1575
@@ -9,7 +9,7 @@ torch_geometric/deprecation.py,sha256=dWRymDIUkUVI2MeEmBG5WF4R6jObZeseSBV9G6FNfj
  torch_geometric/device.py,sha256=tU5-_lBNVbVHl_kUmWPwiG5mQ1pyapwMF4JkmtNN3MM,1224
  torch_geometric/edge_index.py,sha256=BsLh5tOZRjjSYDkjqOFAdBuvMaDg7EWaaLELYsUL0Z8,70048
  torch_geometric/experimental.py,sha256=JbtNNEXjFGI8hZ9raM6-qrZURP6Z5nlDK8QicZUIbz0,4756
- torch_geometric/hash_tensor.py,sha256=koofBrEEo5oHCCt0gH6gCkiywyxSFSGdk-keg7MXGf4,19490
+ torch_geometric/hash_tensor.py,sha256=xXKWffFz4ML4jTKPNagiAWqu-Cjptmb3WNhIPo0C0pw,23200
  torch_geometric/home.py,sha256=EV54B4Dmiv61GDbkCwtCfWGWJ4eFGwZ8s3KOgGjwYgY,790
  torch_geometric/index.py,sha256=9ChzWFCwj2slNcVBOgfV-wQn-KscJe_y7502w-Vf76w,24045
  torch_geometric/inspector.py,sha256=nKi5o4Mn6xsG0Ex1GudTEQt_EqnF9mcMqGtp7Shh9sQ,19336
@@ -633,7 +633,7 @@ torch_geometric/utils/undirected.py,sha256=H_nfpI0_WluOG6VfjPyldvcjL4w5USAKWu2x5
  torch_geometric/visualization/__init__.py,sha256=PyR_4K5SafsJrBr6qWrkjKr6GBL1b7FtZybyXCDEVwY,154
  torch_geometric/visualization/graph.py,sha256=ZuLPL92yGRi7lxlqsUPwL_EVVXF7P2kMcveTtW79vpA,4784
  torch_geometric/visualization/influence.py,sha256=CWMvuNA_Nf1sfbJmQgn58yS4OFpeKXeZPe7kEuvkUBw,477
- pyg_nightly-2.7.0.dev20250224.dist-info/licenses/LICENSE,sha256=ic-27cMJc1kWoMEYncz3Ya3Ur2Bi3bNLWib2DT763-o,1067
- pyg_nightly-2.7.0.dev20250224.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
- pyg_nightly-2.7.0.dev20250224.dist-info/METADATA,sha256=OzM9N2hJ-COuEqz7HmEj8_wZkdLZDavaZfWDS5j08SA,63021
- pyg_nightly-2.7.0.dev20250224.dist-info/RECORD,,
+ pyg_nightly-2.7.0.dev20250225.dist-info/licenses/LICENSE,sha256=ic-27cMJc1kWoMEYncz3Ya3Ur2Bi3bNLWib2DT763-o,1067
+ pyg_nightly-2.7.0.dev20250225.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+ pyg_nightly-2.7.0.dev20250225.dist-info/METADATA,sha256=m8UNJu0M5iArtdBVYT4uMh9AJdhwbI6VVLnsGvQcgnI,63021
+ pyg_nightly-2.7.0.dev20250225.dist-info/RECORD,,
@@ -31,7 +31,7 @@ from .lazy_loader import LazyLoader
  contrib = LazyLoader('contrib', globals(), 'torch_geometric.contrib')
  graphgym = LazyLoader('graphgym', globals(), 'torch_geometric.graphgym')

- __version__ = '2.7.0.dev20250224'
+ __version__ = '2.7.0.dev20250225'

  __all__ = [
      'Index',
@@ -12,6 +12,7 @@ from typing import (
      Union,
  )

+ import numpy as np
  import torch
  import torch.utils._pytree as pytree
  import xxhash
@@ -245,7 +246,7 @@ class HashTensor(Tensor):
          import pandas as pd

          ser = pd.Series(query.cpu().numpy(), dtype=self._map)
-         index = torch.from_numpy(ser.cat.codes.to_numpy()).to(torch.long)
+         index = torch.from_numpy(ser.cat.codes.to_numpy().copy()).long()

          index = index.to(self.device)

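For context, the `.copy()`/`.long()` change above sits in the CPU lookup path that maps query keys to row positions through a pandas `CategoricalDtype` (the role of `self._map`). The stand-alone sketch below mirrors that pattern with made-up keys and queries; the added `.copy()` presumably avoids handing `torch.from_numpy` a read-only buffer (which would warn about non-writable arrays), and `.long()` is shorthand for `.to(torch.long)`.

```python
import pandas as pd
import torch

# Hypothetical key set; in `HashTensor` this is the `self._map` categorical.
key_dtype = pd.CategoricalDtype(categories=[10, 30, 20])

query = torch.tensor([20, 10, 99])
ser = pd.Series(query.cpu().numpy(), dtype=key_dtype)

# Category codes give the row position of each queried key; missing keys map to -1.
codes = ser.cat.codes.to_numpy()

index = torch.from_numpy(codes.copy()).long()
print(index)  # tensor([ 2,  0, -1])
```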
@@ -299,10 +300,44 @@ class HashTensor(Tensor):
              kwargs)
          return func(*args, **(kwargs or {}))

+     def __tensor_flatten__(self) -> Tuple[List[str], Tuple[Any, ...]]:
+         attrs = ['_map', '_min_key', '_max_key']
+         if self._value is not None:
+             attrs.append('_value')
+
+         ctx = (self.size(0), self.dtype)
+
+         return attrs, ctx
+
+     @staticmethod
+     def __tensor_unflatten__(
+         inner_tensors: Dict[str, Any],
+         ctx: Tuple[Any, ...],
+         outer_size: Tuple[int, ...],
+         outer_stride: Tuple[int, ...],
+     ) -> 'HashTensor':
+         return HashTensor._from_data(
+             inner_tensors['_map'],
+             inner_tensors.get('_value', None),
+             inner_tensors['_min_key'],
+             inner_tensors['_max_key'],
+             num_keys=ctx[0],
+             dtype=ctx[1],
+         )
+
+     def __repr__(self) -> str:  # type: ignore
+         indent = len(f'{self.__class__.__name__}(')
+         tensor_str = torch._tensor_str._tensor_str(self.as_tensor(), indent)
+         return torch._tensor_str._str_intern(self, tensor_contents=tensor_str)
+
      def tolist(self) -> List[Any]:
          """"""  # noqa: D419
          return self.as_tensor().tolist()

+     def numpy(self, *, force: bool = False) -> np.ndarray:
+         """"""  # noqa: D419
+         return self.as_tensor().numpy(force=force)
+
      def index_select(  # type: ignore
          self,
          dim: int,
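The `__tensor_flatten__`/`__tensor_unflatten__` pair implements PyTorch's tensor-subclass (un)flattening protocol, which lets machinery such as `torch.compile` decompose a `HashTensor` into its inner tensors plus a small context tuple and rebuild it afterwards; the same hunk also adds a `numpy()` forwarding method and a `__repr__` that prints the materialized values. A rough round-trip sketch follows. The `HashTensor(key, value)` constructor call is an assumption about the public API, not something stated in this diff.

```python
import torch
from torch_geometric.hash_tensor import HashTensor

key = torch.tensor([1000, 42, 7])
value = torch.rand(3, 8)
ht = HashTensor(key, value)  # assumption: (key, value) constructor

# Decompose into named inner attributes plus a (num_keys, dtype) context ...
attrs, ctx = ht.__tensor_flatten__()
inner = {name: getattr(ht, name) for name in attrs}

# ... and rebuild an equivalent HashTensor from those pieces.
rebuilt = HashTensor.__tensor_unflatten__(inner, ctx, ht.size(), ht.stride())
assert rebuilt.size() == ht.size() and rebuilt.dtype == ht.dtype
```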
@@ -339,6 +374,33 @@ class HashTensor(Tensor):
              self._value.detach_()
          return super().detach_()  # type: ignore

+     def __getitem__(self, indices: Any) -> Union['HashTensor', Tensor]:
+         if not isinstance(indices, tuple):
+             indices = (indices, )
+         assert len(indices) > 0
+
+         # We convert any index tensor in the first dimension into a tensor.
+         # This means that downstream handling (i.e. in `aten.index.Tensor`)
+         # needs to take this pre-conversion into account. However, detecting
+         # whether the first dimension is indexed can be tricky at times:
+         # * We need to take into account `Ellipsis`
+         # * We need to take any unsqueezing into account
+         if indices[0] is Ellipsis and len(indices) > 1:
+             nonempty_indices = [i for i in indices[1:] if i is not None]
+             if len(nonempty_indices) == self.dim():
+                 indices = indices[1:]
+
+         if isinstance(indices[0], (int, bool)):
+             index: Union[int, Tensor] = int(as_key_tensor([indices[0]]))
+             indices = (index, ) + indices[1:]
+         elif isinstance(indices[0], (Tensor, list, np.ndarray)):
+             index = as_key_tensor(indices[0], device=self.device)
+             indices = (index, ) + indices[1:]
+
+         indices = indices[0] if len(indices) == 1 else indices
+
+         return super().__getitem__(indices)
+

  @implements(aten.alias.default)
  def _alias(tensor: HashTensor) -> HashTensor:
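With the new `__getitem__`, whatever is used to index the first dimension (ints, bools, lists, NumPy arrays, tensors, with `Ellipsis` and `None` handled as the comments describe) is normalized into a key before being handed to the regular indexing path. A hedged usage sketch, again assuming the hypothetical `(key, value)` constructor from the previous example:

```python
import torch
from torch_geometric.hash_tensor import HashTensor

key = torch.tensor([1000, 42, 7])
value = torch.rand(3, 8)
ht = HashTensor(key, value)  # hypothetical constructor signature

# The first dimension is indexed by key rather than by position: the indices
# below are converted via `as_key_tensor` and resolved through `aten.index.Tensor`.
rows = ht[torch.tensor([42, 1000])]
same = ht[[42, 1000]]  # lists and NumPy arrays are converted the same way
```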
@@ -454,7 +516,8 @@ def _pin_memory(tensor: HashTensor) -> HashTensor:
  def _unsqueeze(tensor: HashTensor, dim: int) -> HashTensor:
      if dim == 0 or dim == -(tensor.dim() + 1):
          raise IndexError(f"Cannot unsqueeze '{tensor.__class__.__name__}' in "
-                          f"the first dimension")
+                          f"the first dimension. Please call `as_tensor()` "
+                          f"beforehand")

      return tensor._from_data(
          tensor._map,
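Unsqueezing the first dimension is still rejected, since it would displace the key dimension; the error message now points to the workaround of materializing the tensor first. Roughly, with the same hypothetical constructor as above:

```python
import torch
from torch_geometric.hash_tensor import HashTensor

ht = HashTensor(torch.tensor([3, 1, 2]), torch.rand(3, 4))  # hypothetical constructor

# ht.unsqueeze(0) raises IndexError("Cannot unsqueeze ... Please call `as_tensor()` beforehand"),
# so materialize first:
batched = ht.as_tensor().unsqueeze(0)  # plain Tensor of shape [1, 3, 4]
```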
@@ -555,22 +618,32 @@ _old_index_select = torch.index_select

  def _new_index_select(
      input: Tensor,
-     dim: int,
+     dim: Union[int, str],
      index: Tensor,
-     *,
      out: Optional[Tensor] = None,
  ) -> Tensor:

-     if dim < -input.dim() or dim >= input.dim():
+     if isinstance(dim, int) and (dim < -input.dim() or dim >= input.dim()):
          raise IndexError(f"Dimension out of range (expected to be in range of "
                           f"[{-input.dim()}, {input.dim()-1}], but got {dim})")

      # We convert any index tensor in the first dimension into a tensor. This
      # means that downstream handling (i.e. in `aten.index_select.default`)
      # needs to take this pre-conversion into account.
-     if isinstance(input, HashTensor) and (dim == 0 or dim == -input.dim()):
+     if (not torch.jit.is_scripting() and isinstance(input, HashTensor)
+             and isinstance(dim, int) and (dim == 0 or dim == -input.dim())):
          index = as_key_tensor(index, device=input.device)
-     return _old_index_select(input, dim, index, out=out)
+
+     if isinstance(dim, int):  # Type narrowing...
+         if out is None:
+             return _old_index_select(input, dim, index)
+         else:
+             return _old_index_select(input, dim, index, out=out)
+     else:
+         if out is None:
+             return _old_index_select(input, dim, index)
+         else:
+             return _old_index_select(input, dim, index, out=out)


  torch.index_select = _new_index_select  # type: ignore
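Because `torch.index_select` is monkey-patched at import time, the wrapper has to keep behaving like the stock function for plain tensors — hence the widened `dim: Union[int, str]` (presumably to keep the named-dimension overload working), the explicit type-narrowing branches, and the `out=None` case that avoids forwarding `out` at all — while converting first-dimension indices of a `HashTensor` into key tensors. A hedged sketch of both paths:

```python
import torch
from torch_geometric.hash_tensor import HashTensor  # importing installs the patch

# Plain tensors go through the original `torch.index_select` unchanged.
x = torch.rand(5, 4)
picked = torch.index_select(x, 0, torch.tensor([4, 0]))

# For a HashTensor, indices along dim 0 are interpreted as keys rather than
# positions (hypothetical constructor, as in the earlier sketches).
ht = HashTensor(torch.tensor([30, 10, 20]), torch.rand(3, 4))
by_key = torch.index_select(ht, 0, torch.tensor([10, 30]))
```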
@@ -603,20 +676,25 @@ _old_select = torch.select

  def _new_select(
      input: Tensor,
-     dim: int,
+     dim: Union[int, str],
      index: int,
  ) -> Tensor:

-     if dim < -input.dim() or dim >= input.dim():
+     if isinstance(dim, int) and (dim < -input.dim() or dim >= input.dim()):
          raise IndexError(f"Dimension out of range (expected to be in range of "
                           f"[{-input.dim()}, {input.dim()-1}], but got {dim})")

      # We convert any index in the first dimension into an integer. This means
      # that downstream handling (i.e. in `aten.select.int`) needs to take this
      # pre-conversion into account.
-     if isinstance(input, HashTensor) and (dim == 0 or dim == -input.dim()):
+     if (not torch.jit.is_scripting() and isinstance(input, HashTensor)
+             and isinstance(dim, int) and (dim == 0 or dim == -input.dim())):
          index = int(as_key_tensor([index]))
-     return _old_select(input, dim, index)
+
+     if isinstance(dim, int):  # Type narrowing...
+         return _old_select(input, dim, index)
+     else:
+         return _old_select(input, dim, index)


  torch.select = _new_select  # type: ignore
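`torch.select` receives the same treatment: for a `HashTensor`, selecting along the first dimension first normalizes the given index into an integer key via `as_key_tensor` before dispatching to `aten.select.int`, while the widened `dim: Union[int, str]` and the type-narrowing branch keep the patched wrapper compatible with the stock overloads. For example, with the same hypothetical constructor:

```python
import torch
from torch_geometric.hash_tensor import HashTensor

ht = HashTensor(torch.tensor([30, 10, 20]), torch.rand(3, 4))  # hypothetical constructor

row = torch.select(ht, 0, 10)  # presumably the row stored for key 10, per the comment above
col = torch.select(ht, 1, 2)   # later dims: a HashTensor over the selected values (see `_select`)
```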
@@ -645,3 +723,27 @@ def _select(
          num_keys=tensor.size(0),
          dtype=tensor.dtype,
      )
+
+
+ @implements(aten.index.Tensor)
+ def _index(
+     tensor: HashTensor,
+     indices: List[Optional[Tensor]],
+ ) -> Union[HashTensor, Tensor]:
+
+     assert len(indices) > 0
+
+     if indices[0] is not None:
+         out = tensor._get(indices[0])
+         if len(indices) > 1:
+             out = aten.index.Tensor(out, [None] + indices[1:])
+         return out
+
+     return tensor._from_data(
+         tensor._map,
+         aten.index.Tensor(tensor.as_tensor(), indices),
+         tensor._min_key,
+         tensor._max_key,
+         num_keys=tensor.size(0),
+         dtype=tensor.dtype,
+     )