torchzero 0.3.5__py3-none-any.whl → 0.3.6__py3-none-any.whl
This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- tests/test_tensorlist.py +17 -17
- {torchzero-0.3.5.dist-info → torchzero-0.3.6.dist-info}/METADATA +1 -1
- {torchzero-0.3.5.dist-info → torchzero-0.3.6.dist-info}/RECORD +6 -6
- {torchzero-0.3.5.dist-info → torchzero-0.3.6.dist-info}/WHEEL +0 -0
- {torchzero-0.3.5.dist-info → torchzero-0.3.6.dist-info}/licenses/LICENSE +0 -0
- {torchzero-0.3.5.dist-info → torchzero-0.3.6.dist-info}/top_level.txt +0 -0
tests/test_tensorlist.py
CHANGED
@@ -1301,7 +1301,7 @@ def test_reduction_ops(simple_tl: TensorList, reduction_method, dim, keepdim):
 expected_tl = TensorList(expected_list)
 assert isinstance(result, TensorList)
 assert len(result) == len(expected_tl)
-assert_tl_allclose(result, expected_tl, atol=1e-
+assert_tl_allclose(result, expected_tl, atol=1e-3) # Use allclose due to potential float variations

 # --- Grafting, Rescaling, Normalizing, Clipping ---

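Note on the change above: assert_tl_allclose is a helper defined earlier in tests/test_tensorlist.py, and its body is not part of this diff. As a rough sketch only (the real helper may differ), such a TensorList comparison typically wraps torch.allclose over paired tensors:

import torch

def assert_tl_allclose(actual, expected, rtol=1e-5, atol=1e-8):
    # hypothetical minimal form: compare two TensorLists element-wise
    assert len(actual) == len(expected)
    for a, e in zip(actual, expected):
        assert torch.allclose(a, e, rtol=rtol, atol=atol)

With torch.allclose's default atol of 1e-8, chained float32 reductions can fail spuriously; passing an explicit atol=1e-3, as the new line does, loosens the comparison enough to absorb those float variations.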
@@ -1381,8 +1381,8 @@ def test_rescale(simple_tl: TensorList, dim):
 assert torch.allclose(rescaled_scalar.global_min(), torch.tensor(min_val))
 assert torch.allclose(rescaled_scalar.global_max(), torch.tensor(max_val))
 else:
-assert_tl_allclose(rescaled_scalar_min, TensorList([torch.full_like(t, min_val) for t in rescaled_scalar_min]),atol=1e-
-assert_tl_allclose(rescaled_scalar_max, TensorList([torch.full_like(t, max_val) for t in rescaled_scalar_max]),atol=1e-
+assert_tl_allclose(rescaled_scalar_min, TensorList([torch.full_like(t, min_val) for t in rescaled_scalar_min]),atol=1e-3)
+assert_tl_allclose(rescaled_scalar_max, TensorList([torch.full_like(t, max_val) for t in rescaled_scalar_max]),atol=1e-3)


 # Rescale list
@@ -1402,8 +1402,8 @@ def test_rescale(simple_tl: TensorList, dim):
 assert global_max_rescaled < avg_max + 1.0 # Loose check

 else:
-assert_tl_allclose(rescaled_list_min, TensorList([torch.full_like(t, mn) for t, mn in zip(rescaled_list_min, min_list)]),atol=1e-
-assert_tl_allclose(rescaled_list_max, TensorList([torch.full_like(t, mx) for t, mx in zip(rescaled_list_max, max_list)]),atol=1e-
+assert_tl_allclose(rescaled_list_min, TensorList([torch.full_like(t, mn) for t, mn in zip(rescaled_list_min, min_list)]),atol=1e-3)
+assert_tl_allclose(rescaled_list_max, TensorList([torch.full_like(t, mx) for t, mx in zip(rescaled_list_max, max_list)]),atol=1e-3)

 # Rescale to 01 helper
 rescaled_01 = simple_tl.rescale_to_01(dim=dim, eps=eps)
@@ -1413,8 +1413,8 @@ def test_rescale(simple_tl: TensorList, dim):
 assert torch.allclose(rescaled_01.global_min(), torch.tensor(0.0))
 assert torch.allclose(rescaled_01.global_max(), torch.tensor(1.0))
 else:
-assert_tl_allclose(rescaled_01_min, TensorList([torch.zeros_like(t) for t in rescaled_01_min]), atol=1e-
-assert_tl_allclose(rescaled_01_max, TensorList([torch.ones_like(t) for t in rescaled_01_max]), atol=1e-
+assert_tl_allclose(rescaled_01_min, TensorList([torch.zeros_like(t) for t in rescaled_01_min]), atol=1e-3)
+assert_tl_allclose(rescaled_01_max, TensorList([torch.ones_like(t) for t in rescaled_01_max]), atol=1e-3)


 # Test inplace
@@ -1454,11 +1454,11 @@ def test_normalize(big_tl: TensorList, dim):
 normalized_scalar_var = normalized_scalar.var(dim=dim if dim != 'global' else None)

 if dim == 'global':
-assert torch.allclose(normalized_scalar.global_mean(), torch.tensor(mean_val), atol=1e-
-assert torch.allclose(normalized_scalar.global_var(), torch.tensor(var_val), atol=1e-
+assert torch.allclose(normalized_scalar.global_mean(), torch.tensor(mean_val), atol=1e-3)
+assert torch.allclose(normalized_scalar.global_var(), torch.tensor(var_val), atol=1e-3)
 else:
-assert_tl_allclose(normalized_scalar_mean, TensorList([torch.full_like(t, mean_val) for t in normalized_scalar_mean]), atol=1e-
-assert_tl_allclose(normalized_scalar_var, TensorList([torch.full_like(t, var_val) for t in normalized_scalar_var]), atol=1e-
+assert_tl_allclose(normalized_scalar_mean, TensorList([torch.full_like(t, mean_val) for t in normalized_scalar_mean]), atol=1e-3)
+assert_tl_allclose(normalized_scalar_var, TensorList([torch.full_like(t, var_val) for t in normalized_scalar_var]), atol=1e-3)

 # Normalize list mean/var
 normalized_list = simple_tl.normalize(mean_list, var_list, dim=dim)
@@ -1476,19 +1476,19 @@ def test_normalize(big_tl: TensorList, dim):
 # assert torch.allclose(global_mean_rescaled, torch.tensor(avg_mean), rtol=1e-1, atol=1e-1) # Loose check
 # assert torch.allclose(global_var_rescaled, torch.tensor(avg_var), rtol=1e-1, atol=1e-1) # Loose check
 else:
-assert_tl_allclose(normalized_list_mean, TensorList([torch.full_like(t, m) for t, m in zip(normalized_list_mean, mean_list)]), atol=1e-
-assert_tl_allclose(normalized_list_var, TensorList([torch.full_like(t, v) for t, v in zip(normalized_list_var, var_list)]), atol=1e-
+assert_tl_allclose(normalized_list_mean, TensorList([torch.full_like(t, m) for t, m in zip(normalized_list_mean, mean_list)]), atol=1e-3)
+assert_tl_allclose(normalized_list_var, TensorList([torch.full_like(t, v) for t, v in zip(normalized_list_var, var_list)]), atol=1e-3)

 # Z-normalize helper
 znorm = simple_tl.znormalize(dim=dim, eps=1e-10)
 znorm_mean = znorm.mean(dim=dim if dim != 'global' else None)
 znorm_var = znorm.var(dim=dim if dim != 'global' else None)
 if dim == 'global':
-assert torch.allclose(znorm.global_mean(), torch.tensor(0.0), atol=1e-
-assert torch.allclose(znorm.global_var(), torch.tensor(1.0), atol=1e-
+assert torch.allclose(znorm.global_mean(), torch.tensor(0.0), atol=1e-3)
+assert torch.allclose(znorm.global_var(), torch.tensor(1.0), atol=1e-3)
 else:
-assert_tl_allclose(znorm_mean, TensorList([torch.zeros_like(t) for t in znorm_mean]), atol=1e-
-assert_tl_allclose(znorm_var, TensorList([torch.ones_like(t) for t in znorm_var]), atol=1e-
+assert_tl_allclose(znorm_mean, TensorList([torch.zeros_like(t) for t in znorm_mean]), atol=1e-3)
+assert_tl_allclose(znorm_var, TensorList([torch.ones_like(t) for t in znorm_var]), atol=1e-3)


 # Test inplace
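Taken together, the hunks above only loosen assertion tolerances to atol=1e-3; the invariants being tested are unchanged. As an illustration of one such invariant (the import path below is an assumption, while the constructor and method names come from the diff itself): znormalize should yield a TensorList whose global mean is about 0 and global variance about 1.

import torch
from torchzero.utils import TensorList  # import path is a guess, not confirmed by this diff

tl = TensorList([torch.randn(3, 4), torch.randn(5)])
z = tl.znormalize(dim='global', eps=1e-10)
# the updated tests assert these to within atol=1e-3
assert torch.allclose(z.global_mean(), torch.tensor(0.0), atol=1e-3)
assert torch.allclose(z.global_var(), torch.tensor(1.0), atol=1e-3)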
{torchzero-0.3.5.dist-info → torchzero-0.3.6.dist-info}/RECORD
CHANGED

@@ -2,7 +2,7 @@ docs/source/conf.py,sha256=jd80ZT2IdCx7nlQrpOTJL8UhGBNm6KYyXlpp0jmRiAw,1849
 tests/test_identical.py,sha256=NZ7A8Rm1U9Q16d-cG2G_wccpPtNALyoKYJt9qMownMc,11568
 tests/test_module.py,sha256=qX3rjdSJsbA8JO17bPTUIDspe7bg2dogqxMw__KV7SU,2039
 tests/test_opts.py,sha256=oDZVFr9AE9ZhyR-sImSgNzQsbPsUtJLzuLd1Nxgkp1w,40850
-tests/test_tensorlist.py,sha256=
+tests/test_tensorlist.py,sha256=VWX9wYdfkG-0Y8I0wWPp56ZJM0mBNPvS_SC3irmcYcs,72427
 tests/test_utils_optimizer.py,sha256=bvC0Ehvs2L8fohpyIF5Vfr9OKTycpnODWLPflXilU1c,8414
 tests/test_vars.py,sha256=3p9dsHk7SJpMd-WRD0ziBNq5FEHRBJGSxbMLD8ES4J0,6815
 torchzero/__init__.py,sha256=L7IJ1qZ3o8E9oRwlJZBK2_2yII_eeGEk57Of6EfVbrk,112
@@ -121,8 +121,8 @@ torchzero/utils/linalg/orthogonalize.py,sha256=mDCkET7qgDZqf_y6oPYAK3d2L5HrB8gzO
 torchzero/utils/linalg/qr.py,sha256=L-RXuYV-SIHI-Llq4y1rQ_Tz-yamds0_QNZeHapbjNE,2507
 torchzero/utils/linalg/solve.py,sha256=hN450ONzAirYOvWF2g0E0Wy2n1bCw4X-KXWi6p4jvDM,5136
 torchzero/utils/linalg/svd.py,sha256=wBxl-JSciINV-N6zvM4SGdveqMr6idq51h68LyQQRYg,660
-torchzero-0.3.
-torchzero-0.3.
-torchzero-0.3.
-torchzero-0.3.
-torchzero-0.3.
+torchzero-0.3.6.dist-info/licenses/LICENSE,sha256=r9ZciAoZoqKC_FNADE0ORukj1p1XhLXEbegdsAyqhJs,1087
+torchzero-0.3.6.dist-info/METADATA,sha256=wjXJuO_WRQYv15BSA_9yo2qe2xe7jET7YOy8xb9YmnE,13944
+torchzero-0.3.6.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+torchzero-0.3.6.dist-info/top_level.txt,sha256=YDdpIOb7HyKV9THOtOYsFFMTbxvCO0kiol4-83tDj-A,21
+torchzero-0.3.6.dist-info/RECORD,,
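For reference, each RECORD entry has the form path,hash,size, where the hash is the urlsafe-base64 sha256 digest of the file with the '=' padding stripped (the standard wheel RECORD format). A small sketch of how such an entry can be recomputed and checked against this diff:

import base64
import hashlib

def record_hash(path: str) -> str:
    # RECORD stores "sha256=" + urlsafe-base64(sha256 digest) with '=' padding stripped
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

# For the 0.3.6 wheel contents this should reproduce the new entry, e.g.
# record_hash("tests/test_tensorlist.py") == "sha256=VWX9wYdfkG-0Y8I0wWPp56ZJM0mBNPvS_SC3irmcYcs"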
{torchzero-0.3.5.dist-info → torchzero-0.3.6.dist-info}/WHEEL
File without changes

{torchzero-0.3.5.dist-info → torchzero-0.3.6.dist-info}/licenses/LICENSE
File without changes

{torchzero-0.3.5.dist-info → torchzero-0.3.6.dist-info}/top_level.txt
File without changes