pyg-nightly 2.7.0.dev20250325__py3-none-any.whl → 2.7.0.dev20250327__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyg_nightly-2.7.0.dev20250325.dist-info → pyg_nightly-2.7.0.dev20250327.dist-info}/METADATA +1 -1
- {pyg_nightly-2.7.0.dev20250325.dist-info → pyg_nightly-2.7.0.dev20250327.dist-info}/RECORD +6 -6
- {pyg_nightly-2.7.0.dev20250325.dist-info → pyg_nightly-2.7.0.dev20250327.dist-info}/WHEEL +1 -1
- torch_geometric/__init__.py +1 -1
- torch_geometric/utils/_negative_sampling.py +25 -9
- {pyg_nightly-2.7.0.dev20250325.dist-info → pyg_nightly-2.7.0.dev20250327.dist-info}/licenses/LICENSE +0 -0
{pyg_nightly-2.7.0.dev20250325.dist-info → pyg_nightly-2.7.0.dev20250327.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyg-nightly
-Version: 2.7.0.dev20250325
+Version: 2.7.0.dev20250327
 Summary: Graph Neural Network Library for PyTorch
 Keywords: deep-learning,pytorch,geometric-deep-learning,graph-neural-networks,graph-convolutional-networks
 Author-email: Matthias Fey <matthias@pyg.org>
{pyg_nightly-2.7.0.dev20250325.dist-info → pyg_nightly-2.7.0.dev20250327.dist-info}/RECORD
RENAMED
@@ -1,4 +1,4 @@
-torch_geometric/__init__.py,sha256=
+torch_geometric/__init__.py,sha256=5IQEu8YEKzY-iYyw4MYVUOKt0JMAAwPxFs7wn456Ms8,1978
 torch_geometric/_compile.py,sha256=f-WQeH4VLi5Hn9lrgztFUCSrN_FImjhQa6BxFzcYC38,1338
 torch_geometric/_onnx.py,sha256=V9ffrIKSqhDw6xUZ12lkuSfNs48cQp2EeJ6Z19GfnVw,349
 torch_geometric/backend.py,sha256=lVaf7aLoVaB3M-UcByUJ1G4T4FOK6LXAg0CF4W3E8jo,1575
@@ -589,7 +589,7 @@ torch_geometric/utils/_grid.py,sha256=1coutST2TMV9TSQcmpXze0GIK9odzZ9wBtbKs6u26D
 torch_geometric/utils/_homophily.py,sha256=1nXxGUATFPB3icEGpvEWUiuYbjU9gDGtlWpuLbtWhJk,5090
 torch_geometric/utils/_index_sort.py,sha256=FTJacmOsqgsyof7MJFHlVVdXhHOjR0j7siTb0UZ-YT0,1283
 torch_geometric/utils/_lexsort.py,sha256=chMEJJRXqfE6-K4vrVszdr3c338EhMZyi0Q9IEJD3p0,1403
-torch_geometric/utils/_negative_sampling.py,sha256=
+torch_geometric/utils/_negative_sampling.py,sha256=G4O572zAQgQQlVMz6ihhE13HFKEekLLYVXcYp4ZSdcQ,15521
 torch_geometric/utils/_normalize_edge_index.py,sha256=H6DY-Dzi1Psr3igG_nb0U3ZPNZz-BBDntO2iuA8FtzA,1682
 torch_geometric/utils/_normalized_cut.py,sha256=uwVJkl-Q0tpY-w0nvcHajcQYcqFh1oDOf55XELdjJBU,1167
 torch_geometric/utils/_one_hot.py,sha256=vXC7l7zudYRZIwWv6mT-Biuk2zKELyqteJXLynPocPM,1404
@@ -633,7 +633,7 @@ torch_geometric/utils/undirected.py,sha256=H_nfpI0_WluOG6VfjPyldvcjL4w5USAKWu2x5
 torch_geometric/visualization/__init__.py,sha256=PyR_4K5SafsJrBr6qWrkjKr6GBL1b7FtZybyXCDEVwY,154
 torch_geometric/visualization/graph.py,sha256=ZuLPL92yGRi7lxlqsUPwL_EVVXF7P2kMcveTtW79vpA,4784
 torch_geometric/visualization/influence.py,sha256=CWMvuNA_Nf1sfbJmQgn58yS4OFpeKXeZPe7kEuvkUBw,477
-pyg_nightly-2.7.0.
-pyg_nightly-2.7.0.
-pyg_nightly-2.7.0.
-pyg_nightly-2.7.0.
+pyg_nightly-2.7.0.dev20250327.dist-info/licenses/LICENSE,sha256=ic-27cMJc1kWoMEYncz3Ya3Ur2Bi3bNLWib2DT763-o,1067
+pyg_nightly-2.7.0.dev20250327.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+pyg_nightly-2.7.0.dev20250327.dist-info/METADATA,sha256=QHeZLWk6bzSKdmuCxjDYsCxE6Y_ny6HnTIemg7Z4624,63021
+pyg_nightly-2.7.0.dev20250327.dist-info/RECORD,,
torch_geometric/__init__.py
CHANGED
@@ -31,7 +31,7 @@ from .lazy_loader import LazyLoader
 contrib = LazyLoader('contrib', globals(), 'torch_geometric.contrib')
 graphgym = LazyLoader('graphgym', globals(), 'torch_geometric.graphgym')
 
-__version__ = '2.7.0.dev20250325'
+__version__ = '2.7.0.dev20250327'
 
 __all__ = [
     'Index',
torch_geometric/utils/_negative_sampling.py
CHANGED
@@ -12,7 +12,7 @@ from torch_geometric.utils.num_nodes import maybe_num_nodes
 def negative_sampling(
     edge_index: Tensor,
     num_nodes: Optional[Union[int, Tuple[int, int]]] = None,
-    num_neg_samples: Optional[int] = None,
+    num_neg_samples: Optional[Union[int, float]] = None,
     method: str = "sparse",
     force_undirected: bool = False,
 ) -> Tensor:
@@ -25,10 +25,12 @@
             If given as a tuple, then :obj:`edge_index` is interpreted as a
             bipartite graph with shape :obj:`(num_src_nodes, num_dst_nodes)`.
             (default: :obj:`None`)
-        num_neg_samples (int, optional): The (approximate) number of negative
-            samples to return.
-            If set to :obj:`None`, will try to return a negative edge for every
-            positive edge. (default: :obj:`None`)
+        num_neg_samples (int or float, optional): The (approximate) number of
+            negative samples to return. If set to a floating-point value, it
+            represents the ratio of negative samples to generate based on the
+            number of positive edges. If set to :obj:`None`, will try to
+            return a negative edge for every positive edge.
+            (default: :obj:`None`)
         method (str, optional): The method to use for negative sampling,
             *i.e.* :obj:`"sparse"` or :obj:`"dense"`.
             This is a memory/runtime trade-off.
@@ -48,6 +50,11 @@
         tensor([[3, 0, 0, 3],
                 [2, 3, 2, 1]])
 
+        >>> negative_sampling(edge_index, num_nodes=(3, 4),
+        ...                   num_neg_samples=0.5)  # 50% of positive edges
+        tensor([[0, 3],
+                [3, 0]])
+
         >>> # For bipartite graph
         >>> negative_sampling(edge_index, num_nodes=(3, 4))
         tensor([[0, 2, 2, 1],
@@ -74,6 +81,8 @@
 
     if num_neg_samples is None:
         num_neg_samples = edge_index.size(1)
+    elif isinstance(num_neg_samples, float):
+        num_neg_samples = int(num_neg_samples * edge_index.size(1))
     if force_undirected:
         num_neg_samples = num_neg_samples // 2
 
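The two added lines above are the whole feature for negative_sampling: a float is converted into an absolute count as a ratio of the number of positive edges, after which sampling proceeds as before. A minimal standalone sketch of that conversion follows; the helper name resolve_num_neg_samples is hypothetical and not part of the library, it only mirrors the logic shown in the hunk.

```python
from typing import Optional, Union

import torch
from torch import Tensor


def resolve_num_neg_samples(
    edge_index: Tensor,
    num_neg_samples: Optional[Union[int, float]],
    force_undirected: bool = False,
) -> int:
    # Hypothetical helper: mirrors the branch added in the diff above.
    if num_neg_samples is None:
        # None -> one negative sample per positive edge.
        num_neg_samples = edge_index.size(1)
    elif isinstance(num_neg_samples, float):
        # float -> ratio of the number of positive edges.
        num_neg_samples = int(num_neg_samples * edge_index.size(1))
    if force_undirected:
        # Each undirected edge is stored twice, so sample half as many pairs.
        num_neg_samples = num_neg_samples // 2
    return num_neg_samples


edge_index = torch.tensor([[0, 0, 1, 2], [0, 1, 2, 3]])  # 4 positive edges
assert resolve_num_neg_samples(edge_index, None) == 4
assert resolve_num_neg_samples(edge_index, 0.5) == 2
assert resolve_num_neg_samples(edge_index, 3) == 3
```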
@@ -117,7 +126,7 @@
 def batched_negative_sampling(
     edge_index: Tensor,
     batch: Union[Tensor, Tuple[Tensor, Tensor]],
-    num_neg_samples: Optional[int] = None,
+    num_neg_samples: Optional[Union[int, float]] = None,
     method: str = "sparse",
     force_undirected: bool = False,
 ) -> Tensor:
@@ -131,9 +140,11 @@
             node to a specific example.
             If given as a tuple, then :obj:`edge_index` is interpreted as a
             bipartite graph connecting two different node types.
-        num_neg_samples (int, optional): The number of negative samples to
-            return. If set to :obj:`None`, will try to return a negative edge
-            for every positive edge. (default: :obj:`None`)
+        num_neg_samples (int or float, optional): The number of negative
+            samples to return. If set to :obj:`None`, will try to return a
+            negative edge for every positive edge. If float, it will generate
+            :obj:`num_neg_samples * num_edges` negative samples.
+            (default: :obj:`None`)
         method (str, optional): The method to use for negative sampling,
             *i.e.* :obj:`"sparse"` or :obj:`"dense"`.
             This is a memory/runtime trade-off.
@@ -157,6 +168,11 @@
         tensor([[3, 1, 3, 2, 7, 7, 6, 5],
                 [2, 0, 1, 1, 5, 6, 4, 4]])
 
+        >>> # Using float multiplier for negative samples
+        >>> batched_negative_sampling(edge_index, batch, num_neg_samples=1.5)
+        tensor([[3, 1, 3, 2, 7, 7, 6, 5, 2, 0, 1, 1],
+                [2, 0, 1, 1, 5, 6, 4, 4, 3, 2, 3, 0]])
+
         >>> # For bipartite graph
         >>> edge_index1 = torch.as_tensor([[0, 0, 1, 1], [0, 1, 2, 3]])
         >>> edge_index2 = edge_index1 + torch.tensor([[2], [4]])
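Taken together, both samplers in this file now accept either an absolute count or a float ratio for num_neg_samples. A short usage sketch against a nightly that contains this change; the graph below is made up for illustration, and since sampling is random only the returned sizes are meaningful:

```python
import torch

from torch_geometric.utils import batched_negative_sampling, negative_sampling

edge_index = torch.tensor([[0, 0, 1, 2], [0, 1, 2, 3]])  # 4 positive edges

# Integer count (unchanged behaviour): request exactly 3 negative edges.
neg_int = negative_sampling(edge_index, num_nodes=4, num_neg_samples=3)

# Float ratio (new behaviour): 0.5 * 4 positive edges -> 2 negative edges.
neg_ratio = negative_sampling(edge_index, num_nodes=4, num_neg_samples=0.5)
print(neg_int.size(1), neg_ratio.size(1))  # e.g. 3 2

# The batched variant accepts the same float ratio.
batch = torch.zeros(4, dtype=torch.long)  # all nodes belong to one example
neg_batched = batched_negative_sampling(edge_index, batch, num_neg_samples=0.5)
print(neg_batched.size(1))
```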
{pyg_nightly-2.7.0.dev20250325.dist-info → pyg_nightly-2.7.0.dev20250327.dist-info}/licenses/LICENSE
RENAMED
File without changes