pyg-nightly 2.7.0.dev20250529__py3-none-any.whl → 2.7.0.dev20250531__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyg_nightly-2.7.0.dev20250529.dist-info → pyg_nightly-2.7.0.dev20250531.dist-info}/METADATA +12 -12
- {pyg_nightly-2.7.0.dev20250529.dist-info → pyg_nightly-2.7.0.dev20250531.dist-info}/RECORD +15 -15
- torch_geometric/__init__.py +1 -1
- torch_geometric/datasets/hgb_dataset.py +2 -2
- torch_geometric/datasets/hm.py +1 -1
- torch_geometric/datasets/qm7.py +1 -1
- torch_geometric/datasets/snap_dataset.py +8 -4
- torch_geometric/edge_index.py +3 -3
- torch_geometric/hash_tensor.py +3 -3
- torch_geometric/index.py +2 -2
- torch_geometric/transforms/add_positional_encoding.py +1 -1
- torch_geometric/typing.py +5 -3
- torch_geometric/utils/_trim_to_layer.py +2 -2
- {pyg_nightly-2.7.0.dev20250529.dist-info → pyg_nightly-2.7.0.dev20250531.dist-info}/WHEEL +0 -0
- {pyg_nightly-2.7.0.dev20250529.dist-info → pyg_nightly-2.7.0.dev20250531.dist-info}/licenses/LICENSE +0 -0
{pyg_nightly-2.7.0.dev20250529.dist-info → pyg_nightly-2.7.0.dev20250531.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: pyg-nightly
- Version: 2.7.0.dev20250529
+ Version: 2.7.0.dev20250531
Summary: Graph Neural Network Library for PyTorch
Keywords: deep-learning,pytorch,geometric-deep-learning,graph-neural-networks,graph-convolutional-networks
Author-email: Matthias Fey <matthias@pyg.org>
@@ -444,39 +444,39 @@ We recommend to start with a minimal installation, and install additional depend

For ease of installation of these extensions, we provide `pip` wheels for all major OS/PyTorch/CUDA combinations, see [here](https://data.pyg.org/whl).

- #### PyTorch 2.
+ #### PyTorch 2.7

- To install the binaries for PyTorch 2.
+ To install the binaries for PyTorch 2.7.0, simply run

```
- pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.
+ pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.7.0+${CUDA}.html
```

- where `${CUDA}` should be replaced by either `cpu`, `cu118`, `
+ where `${CUDA}` should be replaced by either `cpu`, `cu118`, `cu126`, or `cu128` depending on your PyTorch installation.

- | | `cpu` | `cu118` | `
+ | | `cpu` | `cu118` | `cu126` | `cu128` |
| ----------- | ----- | ------- | ------- | ------- |
| **Linux** | ✅ | ✅ | ✅ | ✅ |
| **Windows** | ✅ | ✅ | ✅ | ✅ |
| **macOS** | ✅ | | | |

- #### PyTorch 2.
+ #### PyTorch 2.6

- To install the binaries for PyTorch 2.
+ To install the binaries for PyTorch 2.6.0, simply run

```
- pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.
+ pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.6.0+${CUDA}.html
```

- where `${CUDA}` should be replaced by either `cpu`, `cu118`, `
+ where `${CUDA}` should be replaced by either `cpu`, `cu118`, `cu124`, or `cu126` depending on your PyTorch installation.

- | | `cpu` | `cu118` | `
+ | | `cpu` | `cu118` | `cu124` | `cu126` |
| ----------- | ----- | ------- | ------- | ------- |
| **Linux** | ✅ | ✅ | ✅ | ✅ |
| **Windows** | ✅ | ✅ | ✅ | ✅ |
| **macOS** | ✅ | | | |

- **Note:** Binaries of older versions are also provided for PyTorch 1.4.0, PyTorch 1.5.0, PyTorch 1.6.0, PyTorch 1.7.0/1.7.1, PyTorch 1.8.0/1.8.1, PyTorch 1.9.0, PyTorch 1.10.0/1.10.1/1.10.2, PyTorch 1.11.0, PyTorch 1.12.0/1.12.1, PyTorch 1.13.0/1.13.1, PyTorch 2.0.0/2.0.1, PyTorch 2.1.0/2.1.1/2.1.2, PyTorch 2.2.0/2.2.1/2.2.2, PyTorch 2.3.0/2.3.1,
+ **Note:** Binaries of older versions are also provided for PyTorch 1.4.0, PyTorch 1.5.0, PyTorch 1.6.0, PyTorch 1.7.0/1.7.1, PyTorch 1.8.0/1.8.1, PyTorch 1.9.0, PyTorch 1.10.0/1.10.1/1.10.2, PyTorch 1.11.0, PyTorch 1.12.0/1.12.1, PyTorch 1.13.0/1.13.1, PyTorch 2.0.0/2.0.1, PyTorch 2.1.0/2.1.1/2.1.2, PyTorch 2.2.0/2.2.1/2.2.2, PyTorch 2.3.0/2.3.1, PyTorch 2.4.0/2.4.1, and PyTorch 2.5.0/2.5.1 (following the same procedure).
**For older versions, you might need to explicitly specify the latest supported version number** or install via `pip install --no-index` in order to prevent a manual installation from source.
You can look up the latest supported version number [here](https://data.pyg.org/whl).
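As a concrete reading of the updated instructions above: for a CUDA 12.6 build of PyTorch 2.7.0, `${CUDA}` becomes `cu126`, i.e. `pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.7.0+cu126.html`; for a CPU-only build, use the `cpu` tag instead.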
{pyg_nightly-2.7.0.dev20250529.dist-info → pyg_nightly-2.7.0.dev20250531.dist-info}/RECORD
RENAMED
@@ -1,4 +1,4 @@
- torch_geometric/__init__.py,sha256=
+ torch_geometric/__init__.py,sha256=ATk4zdv-Np1gZlQB0z4Jxuj5My8yOgPOpXNFtYECdP8,2255
torch_geometric/_compile.py,sha256=f-WQeH4VLi5Hn9lrgztFUCSrN_FImjhQa6BxFzcYC38,1338
torch_geometric/_onnx.py,sha256=V9ffrIKSqhDw6xUZ12lkuSfNs48cQp2EeJ6Z19GfnVw,349
torch_geometric/backend.py,sha256=lVaf7aLoVaB3M-UcByUJ1G4T4FOK6LXAg0CF4W3E8jo,1575
@@ -7,11 +7,11 @@ torch_geometric/config_store.py,sha256=zdMzlgBpUmBkPovpYQh5fMNwTZLDq2OneqX47QEx7
torch_geometric/debug.py,sha256=cLyH9OaL2v7POyW-80b19w-ctA7a_5EZsS4aUF1wc2U,1295
torch_geometric/deprecation.py,sha256=dWRymDIUkUVI2MeEmBG5WF4R6jObZeseSBV9G6FNfjc,858
torch_geometric/device.py,sha256=tU5-_lBNVbVHl_kUmWPwiG5mQ1pyapwMF4JkmtNN3MM,1224
- torch_geometric/edge_index.py,sha256=
+ torch_geometric/edge_index.py,sha256=RbIwLhtoLXmkQ_DqThBCwi1JH7zNRTsuVj0X-sTYlWE,70094
torch_geometric/experimental.py,sha256=JbtNNEXjFGI8hZ9raM6-qrZURP6Z5nlDK8QicZUIbz0,4756
- torch_geometric/hash_tensor.py,sha256=
+ torch_geometric/hash_tensor.py,sha256=YtIGQ29nnR1nXp3NGnm4zR7VpqXteqzyVt36IFtZsHc,24922
torch_geometric/home.py,sha256=EV54B4Dmiv61GDbkCwtCfWGWJ4eFGwZ8s3KOgGjwYgY,790
- torch_geometric/index.py,sha256=
+ torch_geometric/index.py,sha256=FSP-Tkfw6d7P5U8ua8BO2IoEPJSnJT0L2IKyAfIqJR0,24360
torch_geometric/inspector.py,sha256=nKi5o4Mn6xsG0Ex1GudTEQt_EqnF9mcMqGtp7Shh9sQ,19336
torch_geometric/isinstance.py,sha256=truZjdU9PxSvjJ6k0d_CLJ2iOpen2o8U-54pbUbNRyE,935
torch_geometric/lazy_loader.py,sha256=SM0UcXtIdiFge75MKBAWXedoiSOdFDOV0rm1PfoF9cE,908
@@ -19,7 +19,7 @@ torch_geometric/logging.py,sha256=HmHHLiCcM64k-6UYNOSfXPIeSGNAyiGGcn8cD8tlyuQ,85
torch_geometric/resolver.py,sha256=fn-_6mCpI2xv7eDZnIFcYrHOn0IrwbkWFLDb9laQrWI,1270
torch_geometric/seed.py,sha256=MJLbVwpb9i8mK3oi32sS__Cq-dRq_afTeoOL_HoA9ko,372
torch_geometric/template.py,sha256=rqjDWgcSAgTCiV4bkOjWRPaO4PpUdC_RXigzxxBqAu8,1060
- torch_geometric/typing.py,sha256=
+ torch_geometric/typing.py,sha256=QspEdob3sBcx9qenTAO7J7KWDTu6ufQ4XYf_eqZ8k8I,15638
torch_geometric/warnings.py,sha256=SB9dWGovX_KKcxqsOrdTDvSb_j0NoB5vPGnK2vg0jVw,727
torch_geometric/contrib/__init__.py,sha256=0pWkmXfZtbdr-AKwlii5LTFggTEH-MCrSKpZxrtPlVs,352
torch_geometric/contrib/datasets/__init__.py,sha256=lrGnWsEiJf5zsBRmshGZZFN_uYR2ezDjbj9n9nCpvtk,23
@@ -99,8 +99,8 @@ torch_geometric/datasets/git_mol_dataset.py,sha256=LsS_dPYUpwhWXMBh17iT7IbjlLOP0
torch_geometric/datasets/github.py,sha256=Qhqhkvi6eZ8VF_HqP1rL2iYToZavFNsQh7J1WdeM9dA,2687
torch_geometric/datasets/gnn_benchmark_dataset.py,sha256=4P8n7czF-gf1egLYlAcSSvfB0GXIKpAbH5UjsuFld1M,6976
torch_geometric/datasets/heterophilous_graph_dataset.py,sha256=yHHtwl4uPrid0vPOxvPV3sIS8HWdswar8FJ0h0OQ9is,4224
- torch_geometric/datasets/hgb_dataset.py,sha256=
- torch_geometric/datasets/hm.py,sha256=
+ torch_geometric/datasets/hgb_dataset.py,sha256=TielMHTK2sk8YW8xAFgmlKoJSyArqVhcL51bVs1VTJc,8812
+ torch_geometric/datasets/hm.py,sha256=Tpw7JNZmd_d-E5dc0RajMI1vRE3haR1h0xhOxQpkxis,6779
torch_geometric/datasets/hydro_net.py,sha256=7dEH7Vgfwa-BxkpkXdIx3LvmudJhCR17omkpvPm62dg,11417
torch_geometric/datasets/icews.py,sha256=Vdlk-PD10AU68xq8X5IOgrK0wgIBFq8A0D6_WtrXiEo,4735
torch_geometric/datasets/igmc_dataset.py,sha256=pMiOoXjvqhfsDDNw51WT_IVi6wGJ0cUNwTdpEprPh3E,4611
@@ -139,7 +139,7 @@ torch_geometric/datasets/pcqm4m.py,sha256=7ID_xXXIAyuNzYLI2lBWygZl9wGos-dbaz1b6E
torch_geometric/datasets/planetoid.py,sha256=RksfwR_PI7qGVphs-T-4jXDepYwQCweMXElLm096hgg,7201
torch_geometric/datasets/polblogs.py,sha256=IYzsvd4R0OojmOOZUoOdCwQYfwcTfth1PNtcBK1yOGc,3045
torch_geometric/datasets/ppi.py,sha256=zPtg-omC7WYvr9Tzwkb7zNjpXLODsvxKxKdGEUswp2E,5030
- torch_geometric/datasets/qm7.py,sha256=
+ torch_geometric/datasets/qm7.py,sha256=bYyK8xlh9kTr5vqueNbLu9EAjIXkQH1KX1VWnjKfOJc,3323
torch_geometric/datasets/qm9.py,sha256=XU2HTPbgJJ_6hT--X0J2xkXliCbt7_-hub9nuIUQlug,17213
torch_geometric/datasets/rcdd.py,sha256=gvOoM1tw_X5QMyBB4FkMUwNErMXAvImyjz5twktBAh8,5317
torch_geometric/datasets/reddit.py,sha256=QUgiKTaj6YTOYbgWgqV8mPYsctOui2ujaM8f8qy81v0,3131
@@ -149,7 +149,7 @@ torch_geometric/datasets/s3dis.py,sha256=_r9vSX8prt4q_N_4xry_Mwqyf1vXIptAiONrt_6
torch_geometric/datasets/sbm_dataset.py,sha256=-dORNkinyxGwaPtCVpudnktzMppaChTraqwYd6FA6DM,8816
torch_geometric/datasets/shapenet.py,sha256=tn3HiQQAr6lxHrqxfOVaAtl40guwFYTXWCbSoLfbB8M,8496
torch_geometric/datasets/shrec2016.py,sha256=cTLhctbqE0EUEvKddJFhPzDb1oLKXOth4O_WzsWtyMk,6323
- torch_geometric/datasets/snap_dataset.py,sha256=
+ torch_geometric/datasets/snap_dataset.py,sha256=deJvB6cpIQ3bu_pcWoqgEo1-Kl_NcFi7ZSUci645X0U,9481
torch_geometric/datasets/suite_sparse.py,sha256=eqjH4vAUq872qdk3YdLkZSwlu6r7HHpTgK0vEVGmY1s,3278
torch_geometric/datasets/tag_dataset.py,sha256=MbnVCJcryNys1cjdMzXbuk-Rn-BjLbHWFGHx4QYhhUg,14760
torch_geometric/datasets/taobao.py,sha256=CUcZpbWsNTasevflO8zqP0YvENy89P7wpKS4MHaDJ6Q,4170
@@ -527,7 +527,7 @@ torch_geometric/testing/graph_store.py,sha256=00B7QToCIspYmgN7svQKp1iU-qAzEtrt3V
torch_geometric/transforms/__init__.py,sha256=P0R2CFg9pXxjTX4NnYfNPrifRPAw5lVXEOxO80q-1Ek,4296
torch_geometric/transforms/add_gpse.py,sha256=ex7cSLmcSIn-dC5gbic-CLwKegV5Is7Y8mUn-mSOWXg,1555
torch_geometric/transforms/add_metapaths.py,sha256=GabaPRvUnpFrZJsxLMUBY2Egzx94GTgsMxegL_qTtbk,14239
- torch_geometric/transforms/add_positional_encoding.py,sha256=
+ torch_geometric/transforms/add_positional_encoding.py,sha256=J5dmdrpZ7Qc2n2rcvsAz084uhMGPTtD1xGl973vdiHY,6033
torch_geometric/transforms/add_remaining_self_loops.py,sha256=ItU5FAcE-mkbp_wqTLkRhv0RShR5JVr8vr9d5xv3_Ak,2085
torch_geometric/transforms/add_self_loops.py,sha256=No8-tMqERQdWVHwEOaYr9aeg1A_RLisiidEy-1wzoV8,2024
torch_geometric/transforms/base_transform.py,sha256=5y4X5JmpKrJsj9XQ8v_CYPcDB83pq7b1g5RLjeBrxWg,1298
@@ -609,7 +609,7 @@ torch_geometric/utils/_to_dense_adj.py,sha256=hl1sboUBvED5Er66bqLms4VdmxKA-7Y3oz
torch_geometric/utils/_to_dense_batch.py,sha256=-K5NjjfvjKYKJQ3kXgNIDR7lwMJ_GGISI45b50IGMvY,4582
torch_geometric/utils/_train_test_split_edges.py,sha256=KnBDgnaKuJYTHUOIlvFtzvkHUe-93DG3ckST4-wOERM,3569
torch_geometric/utils/_tree_decomposition.py,sha256=ZtpjPQJgXbQWtSWjo-Fmhrov0DGO69TfQb9oBFvZ6dM,5304
- torch_geometric/utils/_trim_to_layer.py,sha256=
+ torch_geometric/utils/_trim_to_layer.py,sha256=cauOEzMJJK4w9BC-Pg1bHVncBYqG9XxQex3rn10BFjc,8339
torch_geometric/utils/_unbatch.py,sha256=B0vjKI96PtHvSBG8F_lqvsiJE134aVjUurPZsG6UZRI,2378
torch_geometric/utils/augmentation.py,sha256=1F0YCuaklZ9ZbXxdFV0oOoemWvLd8p60WvFo2chzl7E,8600
torch_geometric/utils/convert.py,sha256=0KEJoBOzU-w-mMQu9QYaMhUqcrGBxBmeRl0hv8NPvII,21697
@@ -638,7 +638,7 @@ torch_geometric/utils/undirected.py,sha256=H_nfpI0_WluOG6VfjPyldvcjL4w5USAKWu2x5
torch_geometric/visualization/__init__.py,sha256=b-HnVesXjyJ_L1N-DnjiRiRVf7lhwKaBQF_2i5YMVSU,208
torch_geometric/visualization/graph.py,sha256=mfZHXYfiU-CWMtfawYc80IxVwVmtK9hbIkSKhM_j7oI,14311
torch_geometric/visualization/influence.py,sha256=CWMvuNA_Nf1sfbJmQgn58yS4OFpeKXeZPe7kEuvkUBw,477
- pyg_nightly-2.7.0.
- pyg_nightly-2.7.0.
- pyg_nightly-2.7.0.
- pyg_nightly-2.7.0.
+ pyg_nightly-2.7.0.dev20250531.dist-info/licenses/LICENSE,sha256=ic-27cMJc1kWoMEYncz3Ya3Ur2Bi3bNLWib2DT763-o,1067
+ pyg_nightly-2.7.0.dev20250531.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ pyg_nightly-2.7.0.dev20250531.dist-info/METADATA,sha256=64gHJ8lq6nkXhv-ggwHjW8aPjmjEMCU3n2MrZQeJYFo,62967
+ pyg_nightly-2.7.0.dev20250531.dist-info/RECORD,,
torch_geometric/__init__.py
CHANGED
@@ -31,7 +31,7 @@ from .lazy_loader import LazyLoader
contrib = LazyLoader('contrib', globals(), 'torch_geometric.contrib')
graphgym = LazyLoader('graphgym', globals(), 'torch_geometric.graphgym')

- __version__ = '2.7.0.dev20250529'
+ __version__ = '2.7.0.dev20250531'

__all__ = [
    'Index',
torch_geometric/datasets/hgb_dataset.py
CHANGED
@@ -123,8 +123,8 @@ class HGBDataset(InMemoryDataset):
start = info.index('LINK\tSTART\tEND\tMEANING') + 1
end = info[start:].index('')
for key, row in enumerate(info[start:start + end]):
-
- src, dst, rel = (v for v in
+ edge = row.split('\t')[1:]
+ src, dst, rel = (v for v in edge if v != '')
src, dst = n_types[int(src)], n_types[int(dst)]
rel = rel.split('-')[1]
e_types[key] = (src, rel, dst)
torch_geometric/datasets/hm.py
CHANGED
@@ -81,7 +81,7 @@ class HM(InMemoryDataset):
xs.append(torch.from_numpy(x).to(torch.float))

x = torch.from_numpy(df['age'].values).to(torch.float).view(-1, 1)
- x = x.nan_to_num(nan=x.nanmean())
+ x = x.nan_to_num(nan=x.nanmean())  # type: ignore
xs.append(x / x.max())

data['customer'].x = torch.cat(xs, dim=-1)
torch_geometric/datasets/qm7.py
CHANGED
@@ -84,7 +84,7 @@ class QM7b(InMemoryDataset):
edge_attr = coulomb_matrix[i, edge_index[0], edge_index[1]]
y = target[i].view(1, -1)
data = Data(edge_index=edge_index, edge_attr=edge_attr, y=y)
- data.num_nodes = edge_index.max()
+ data.num_nodes = int(edge_index.max()) + 1
data_list.append(data)

if self.pre_filter is not None:
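The `num_nodes` fix above, and the matching ones in the `snap_dataset.py` hunks that follow, all apply the same convention: node indices are 0-based, so the node count is the largest index referenced by `edge_index` plus one, converted to a plain Python `int`. A minimal sketch of that convention (illustrative only, not code from the package):

```python
import torch

edge_index = torch.tensor([[0, 1, 3],
                           [1, 2, 0]])  # three edges over nodes 0..3

# Nodes are 0-indexed, so the count is the largest referenced index + 1.
# int(...) converts the 0-dim tensor returned by max() into a Python int.
num_nodes = int(edge_index.max()) + 1
assert num_nodes == 4
```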
torch_geometric/datasets/snap_dataset.py
CHANGED
@@ -109,7 +109,7 @@ def read_ego(files: List[str], name: str) -> List[EgoData]:
row = torch.cat([row, row_ego, col_ego], dim=0)
col = torch.cat([col, col_ego, row_ego], dim=0)
edge_index = torch.stack([row, col], dim=0)
- edge_index = coalesce(edge_index, num_nodes=N)
+ edge_index = coalesce(edge_index, num_nodes=int(N))

data = EgoData(x=x, edge_index=edge_index, circle=circle,
               circle_batch=circle_batch)
@@ -129,7 +129,7 @@ def read_soc(files: List[str], name: str) -> List[Data]:
edge_index = pd.read_csv(files[0], sep='\t', header=None,
                         skiprows=skiprows, dtype=np.int64)
edge_index = torch.from_numpy(edge_index.values).t()
- num_nodes = edge_index.max()
+ num_nodes = int(edge_index.max()) + 1
edge_index = coalesce(edge_index, num_nodes=num_nodes)

return [Data(edge_index=edge_index, num_nodes=num_nodes)]
@@ -143,11 +143,15 @@ def read_wiki(files: List[str], name: str) -> List[Data]:
edge_index = torch.from_numpy(edge_index.values).t()

idx = torch.unique(edge_index.flatten())
- idx_assoc = torch.full(
+ idx_assoc = torch.full(
+     (edge_index.max() + 1, ),  # type: ignore
+     -1,
+     dtype=torch.long,
+ )
idx_assoc[idx] = torch.arange(idx.size(0))

edge_index = idx_assoc[edge_index]
- num_nodes = edge_index.max()
+ num_nodes = int(edge_index.max()) + 1
edge_index = coalesce(edge_index, num_nodes=num_nodes)

return [Data(edge_index=edge_index, num_nodes=num_nodes)]
torch_geometric/edge_index.py
CHANGED
@@ -803,7 +803,7 @@ class EdgeIndex(Tensor):

size = self.get_sparse_size()
if value is not None and value.dim() > 1:
- size = size + value.size()[1:]
+ size = size + value.size()[1:]

out = torch.full(size, fill_value, dtype=dtype, device=self.device)
out[self._data[0], self._data[1]] = value if value is not None else 1
@@ -1186,10 +1186,10 @@ class EdgeIndex(Tensor):
return edge_index

# Prevent auto-wrapping outputs back into the proper subclass type:
- __torch_function__ = torch._C._disabled_torch_function_impl
+ __torch_function__ = torch._C._disabled_torch_function_impl  # type: ignore

@classmethod
- def __torch_dispatch__(
+ def __torch_dispatch__(  # type: ignore
cls: Type,
func: Callable[..., Any],
types: Iterable[Type[Any]],
torch_geometric/hash_tensor.py
CHANGED
@@ -326,10 +326,10 @@ class HashTensor(Tensor):
# PyTorch/Python builtins #################################################

# Prevent auto-wrapping outputs back into the proper subclass type:
- __torch_function__ = torch._C._disabled_torch_function_impl
+ __torch_function__ = torch._C._disabled_torch_function_impl  # type: ignore

@classmethod
- def __torch_dispatch__(
+ def __torch_dispatch__(  # type: ignore
cls: Type,
func: Callable[..., Any],
types: Iterable[Type[Any]],
@@ -416,7 +416,7 @@ class HashTensor(Tensor):
""""""  # noqa: D419
return self._min_key.is_shared()

- def detach_(self) -> 'HashTensor':
+ def detach_(self) -> 'HashTensor':
""""""  # noqa: D419
if self._value is not None:
self._value.detach_()
torch_geometric/index.py
CHANGED
@@ -361,10 +361,10 @@ class Index(Tensor):
return index

# Prevent auto-wrapping outputs back into the proper subclass type:
- __torch_function__ = torch._C._disabled_torch_function_impl
+ __torch_function__ = torch._C._disabled_torch_function_impl  # type: ignore

@classmethod
- def __torch_dispatch__(
+ def __torch_dispatch__(  # type: ignore
cls: Type,
func: Callable[..., Any],
types: Iterable[Type[Any]],
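The `edge_index.py`, `hash_tensor.py`, and `index.py` hunks above only attach `# type: ignore` markers to an existing tensor-subclassing pattern. For context, here is a minimal, self-contained sketch of what disabling `__torch_function__` does (my own illustration, not code from the package): operator outputs are no longer wrapped back into the subclass, which is why these classes handle re-wrapping themselves inside `__torch_dispatch__`.

```python
import torch
from torch import Tensor


class PlainSubclass(Tensor):
    # Prevent auto-wrapping outputs back into the proper subclass type:
    __torch_function__ = torch._C._disabled_torch_function_impl  # type: ignore


x = torch.arange(3).as_subclass(PlainSubclass)
assert isinstance(x, PlainSubclass)
# With __torch_function__ disabled, operator results fall back to Tensor:
assert type(x + 1) is Tensor
```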
torch_geometric/transforms/add_positional_encoding.py
CHANGED
@@ -97,7 +97,7 @@ class AddLaplacianEigenvectorPE(BaseTransform):
from scipy.sparse.linalg import eigs, eigsh
eig_fn = eigs if not self.is_undirected else eigsh

- eig_vals, eig_vecs = eig_fn(
+ eig_vals, eig_vecs = eig_fn(
L,
k=self.k + 1,
which='SR' if not self.is_undirected else 'SA',
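For readers of the `AddLaplacianEigenvectorPE` hunk above: the transform asks the sparse eigensolver for `k + 1` eigenpairs because the trivial first eigenvector of the Laplacian is discarded. A rough standalone sketch with SciPy (illustrative only; the toy graph and variable names are mine, not the package's):

```python
import numpy as np
from scipy.sparse import csgraph, csr_matrix
from scipy.sparse.linalg import eigsh

# Normalized Laplacian of a tiny undirected path graph 0-1-2:
adj = csr_matrix(np.array([[0., 1., 0.],
                           [1., 0., 1.],
                           [0., 1., 0.]]))
L = csgraph.laplacian(adj, normed=True)

k = 1  # number of positional-encoding channels we want
eig_vals, eig_vecs = eigsh(L, k=k + 1, which='SA')  # smallest-algebraic pairs
order = np.argsort(eig_vals)                         # sort ascending
eig_vecs = np.real(eig_vecs[:, order])
pe = eig_vecs[:, 1:k + 1]                            # drop the trivial first eigenvector
```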
torch_geometric/typing.py
CHANGED
@@ -10,7 +10,7 @@ import torch
from torch import Tensor

try:
- from typing import TypeAlias
+ from typing import TypeAlias
except ImportError:
from typing_extensions import TypeAlias

@@ -21,6 +21,8 @@ WITH_PT23 = WITH_PT20 and int(torch.__version__.split('.')[1]) >= 3
WITH_PT24 = WITH_PT20 and int(torch.__version__.split('.')[1]) >= 4
WITH_PT25 = WITH_PT20 and int(torch.__version__.split('.')[1]) >= 5
WITH_PT26 = WITH_PT20 and int(torch.__version__.split('.')[1]) >= 6
+ WITH_PT27 = WITH_PT20 and int(torch.__version__.split('.')[1]) >= 7
+ WITH_PT28 = WITH_PT20 and int(torch.__version__.split('.')[1]) >= 8
WITH_PT113 = WITH_PT20 or int(torch.__version__.split('.')[1]) >= 13

WITH_WINDOWS = os.name == 'nt'
@@ -95,7 +97,7 @@ except Exception as e:
WITH_CUDA_HASH_MAP = False

if WITH_CPU_HASH_MAP:
- CPUHashMap: TypeAlias = torch.classes.pyg.CPUHashMap
+ CPUHashMap: TypeAlias = torch.classes.pyg.CPUHashMap  # type: ignore
else:

class CPUHashMap:  # type: ignore
@@ -107,7 +109,7 @@ else:


if WITH_CUDA_HASH_MAP:
- CUDAHashMap: TypeAlias = torch.classes.pyg.CUDAHashMap
+ CUDAHashMap: TypeAlias = torch.classes.pyg.CUDAHashMap  # type: ignore
else:

class CUDAHashMap:  # type: ignore
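The two added flags extend the existing `WITH_PT2x` version-gate pattern. As a rough illustration of how such a flag is derived and used (assuming `WITH_PT20` is the major-version check defined earlier in the same module; this snippet is not part of the diff):

```python
import torch

# Mirrors the expression style used in torch_geometric/typing.py:
WITH_PT20 = int(torch.__version__.split('.')[0]) >= 2
WITH_PT27 = WITH_PT20 and int(torch.__version__.split('.')[1]) >= 7

if WITH_PT27:
    # Take code paths that rely on PyTorch >= 2.7 behavior only here.
    pass
```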
torch_geometric/utils/_trim_to_layer.py
CHANGED
@@ -234,10 +234,10 @@ def trim_sparse_tensor(src: SparseTensor, size: Tuple[int, int],
rowptr = torch.narrow(rowptr, 0, 0, size[0] + 1).clone()
rowptr[num_seed_nodes + 1:] = rowptr[num_seed_nodes]

- col = torch.narrow(col, 0, 0, rowptr[-1])
+ col = torch.narrow(col, 0, 0, rowptr[-1])  # type: ignore

if value is not None:
- value = torch.narrow(value, 0, 0, rowptr[-1])
+ value = torch.narrow(value, 0, 0, rowptr[-1])  # type: ignore

csr2csc = src.storage._csr2csc
if csr2csc is not None:
{pyg_nightly-2.7.0.dev20250529.dist-info → pyg_nightly-2.7.0.dev20250531.dist-info}/WHEEL
RENAMED
File without changes
{pyg_nightly-2.7.0.dev20250529.dist-info → pyg_nightly-2.7.0.dev20250531.dist-info}/licenses/LICENSE
RENAMED
File without changes