returnn 1.20250708.165746__py3-none-any.whl → 1.20250717.120243__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of returnn might be problematic.
- returnn/PKG-INFO +1 -1
- returnn/_setup_info_generated.py +2 -2
- returnn/datasets/distrib_files.py +13 -7
- returnn/frontend/audio/specaugment.py +2 -2
- {returnn-1.20250708.165746.dist-info → returnn-1.20250717.120243.dist-info}/METADATA +1 -1
- {returnn-1.20250708.165746.dist-info → returnn-1.20250717.120243.dist-info}/RECORD +9 -9
- {returnn-1.20250708.165746.dist-info → returnn-1.20250717.120243.dist-info}/LICENSE +0 -0
- {returnn-1.20250708.165746.dist-info → returnn-1.20250717.120243.dist-info}/WHEEL +0 -0
- {returnn-1.20250708.165746.dist-info → returnn-1.20250717.120243.dist-info}/top_level.txt +0 -0
returnn/PKG-INFO
CHANGED
returnn/_setup_info_generated.py
CHANGED
@@ -1,2 +1,2 @@
-version = '1.20250708.165746'
-long_version = '1.20250708.165746+git.…'
+version = '1.20250717.120243'
+long_version = '1.20250717.120243+git.c980de6'
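For reference, the two generated constants above can be read back from an installed wheel. A minimal sketch, assuming the new wheel is installed (module and variable names are taken from the hunk above):

# Minimal sketch: read the generated version constants shown in the diff above.
from returnn import _setup_info_generated

print(_setup_info_generated.version)       # e.g. "1.20250717.120243"
print(_setup_info_generated.long_version)  # e.g. "1.20250717.120243+git.c980de6"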
returnn/datasets/distrib_files.py
CHANGED

@@ -174,17 +174,21 @@ class DistributeFilesDataset(CachedDataset2):

         self.distrib_shard_files = distrib_shard_files
         if distrib_shard_files:
-            assert self._num_shards == 1 and self._shard_index == 0, (  # ensure defaults are set
-                f"{self}: Cannot use both dataset-sharding via properties _num_shards and _shard index "
-                f"and {self.__class__.__name__}'s own sharding implementation based on the trainings rank and size."
-            )
             if _distrib_info:
-                # …
+                # We're in a child process.
+                # `_get_rank_and_size()` no longer works,
                 # so we pass the info about the shards via a pickled property.
                 # See also Dataset.__reduce__.
-                …
-                …
+                # _num_shards and _shard_index are already set, so just check.
+                assert (
+                    self._shard_index == _distrib_info["_shard_index"]
+                    and self._num_shards == _distrib_info["_num_shards"]
+                )
             else:
+                assert self._num_shards == 1 and self._shard_index == 0, (  # ensure defaults are set
+                    f"{self}: Cannot use both dataset-sharding via properties _num_shards and _shard index "
+                    f"and {self.__class__.__name__}'s own sharding implementation based on the trainings rank and size."
+                )
                 self._shard_index, self._num_shards = _get_rank_and_size()
             assert 0 <= self._shard_index < self._num_shards

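The restructured branch is easier to follow outside the diff markers. The sketch below is a simplified stand-in for the new control flow, not the actual DistributeFilesDataset code; `_get_rank_and_size` is stubbed:

# Simplified sketch of the new sharding setup shown in the hunk above.
from typing import Dict, Optional, Tuple


def _get_rank_and_size() -> Tuple[int, int]:
    return 0, 1  # stub: single-process default


def setup_sharding(num_shards: int, shard_index: int, distrib_info: Optional[Dict]) -> Tuple[int, int]:
    if distrib_info:
        # Child process: rank/size were passed in via the pickled _distrib_info, so only verify.
        assert shard_index == distrib_info["_shard_index"]
        assert num_shards == distrib_info["_num_shards"]
    else:
        # Parent process: defaults must still be unset before deriving shards from rank/size.
        assert num_shards == 1 and shard_index == 0
        shard_index, num_shards = _get_rank_and_size()
    assert 0 <= shard_index < num_shards
    return shard_index, num_shards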
@@ -524,6 +528,8 @@ def _get_rank_and_size() -> Tuple[int, int]:

         ctx = returnn.tf.horovod.get_ctx(config=config)
         return ctx.rank(), ctx.size()
+    elif config.typed_value("__debug_dummy_distributed_rank_and_size") is not None:
+        return config.typed_value("__debug_dummy_distributed_rank_and_size")
     else:
         return 0, 1

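The new `__debug_dummy_distributed_rank_and_size` branch lets a config force a fixed (rank, size) pair without any distributed backend. A simplified stand-in illustrating that fallback; the dummy config class below is an assumption, not RETURNN's Config API:

# Simplified stand-in for the new fallback above (not the actual RETURNN Config class).
class _DummyConfig:
    def __init__(self, values):
        self._values = values

    def typed_value(self, key):
        return self._values.get(key)


def get_rank_and_size(config):
    # New branch: a config value forces a fixed (rank, size), e.g. for debugging sharding logic.
    dummy = config.typed_value("__debug_dummy_distributed_rank_and_size")
    if dummy is not None:
        return dummy
    return 0, 1  # default: single process


print(get_rank_and_size(_DummyConfig({"__debug_dummy_distributed_rank_and_size": (1, 4)})))  # -> (1, 4)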
returnn/frontend/audio/specaugment.py
CHANGED

@@ -26,10 +26,10 @@ def specaugment(
     """
     SpecAugment, https://arxiv.org/abs/1904.08779
     """
-    if …
+    if feature_dim is None:
         assert x.feature_dim
         feature_dim = x.feature_dim
-    if …
+    if max_consecutive_feature_dims is None:
         max_consecutive_feature_dims = feature_dim.dimension // 5
     if global_train_step_dependent:
         with rf.set_default_device_ctx("cpu"):
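The corrected conditionals only fill in defaults when the arguments are actually None. A standalone sketch of that default resolution, using a hypothetical helper name and mirroring the lines above:

# Hypothetical helper mirroring the default handling in the hunk above.
def _resolve_specaugment_defaults(x, feature_dim=None, max_consecutive_feature_dims=None):
    if feature_dim is None:
        assert x.feature_dim  # the input tensor must know its feature dim
        feature_dim = x.feature_dim
    if max_consecutive_feature_dims is None:
        # By default, mask at most one fifth of the feature dimension at once.
        max_consecutive_feature_dims = feature_dim.dimension // 5
    return feature_dim, max_consecutive_feature_dims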
{returnn-1.20250708.165746.dist-info → returnn-1.20250717.120243.dist-info}/RECORD
CHANGED

@@ -1,9 +1,9 @@
-returnn/PKG-INFO,sha256=…
+returnn/PKG-INFO,sha256=GxZbYJvbUiJoMvMKBm2i5UDgmCZf7Fm7NtO_L36oMa0,5215
 returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
 returnn/__main__.py,sha256=lHyZcu_0yc9f7Vf_Kfdy9PmeU0T76XVXnpalHi5WKro,31740
 returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
 returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
-returnn/_setup_info_generated.py,sha256=…
+returnn/_setup_info_generated.py,sha256=Wgz5eZ1DzRRaKRkPpQq_p-a3XZcZGMjQE4uNaLXtzNs,77
 returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
 returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
 returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -17,7 +17,7 @@ returnn/datasets/basic.py,sha256=S7MoFnQfgjeuZsBnaPrpQWlMUmH68HlUnjX7k881PD0,723
 returnn/datasets/bundle_file.py,sha256=KQNrS1MSf-4_idlK0c0KFwON-f5sEK0sWU15WpoMYpE,2380
 returnn/datasets/cached.py,sha256=RyefRjSDdp-HveK-2vLy2C6BIHcpqQ_lNvUKlIa4QAI,25412
 returnn/datasets/cached2.py,sha256=_6pza3IG68JexaExhj1ld3fP6pE7T-G804driJ9Z_qo,12141
-returnn/datasets/distrib_files.py,sha256=…
+returnn/datasets/distrib_files.py,sha256=SJ2YkZEZmG9lu3MLTwSMyVNfsXzRHqbLNjUn9IDwVJM,30194
 returnn/datasets/generating.py,sha256=9U_w6URIrv-Rb-hDbPOzYW9qYXzJbw32N6G268IKyoM,99833
 returnn/datasets/hdf.py,sha256=v5sjBenURR9Z-g7AQ9tsL84yDSye5RtbLpym3M6HSDE,67833
 returnn/datasets/lm.py,sha256=ycHdGHxT4QshBM9LPktLDaaQRTLO5zQyueCK5KMNR_4,100022
@@ -129,7 +129,7 @@ returnn/frontend/_native/tensor_ops.cpp,sha256=bA4Gf-q8cVENL441r1IYVd44EcUsV-eEL
 returnn/frontend/_native/tensor_ops.hpp,sha256=dDqvUejRNHjItnmOP5aHyAQbAmXmXoDVXSe3tveEU8A,3732
 returnn/frontend/audio/__init__.py,sha256=8mahwucBje8qHKw0bOvoySlvvD0rFKxviSvcAHSjiJY,67
 returnn/frontend/audio/mel.py,sha256=LNzC9aWWgLqua34bwxA--M9shtLlePfwLQ-HpvP2o54,7884
-returnn/frontend/audio/specaugment.py,sha256=…
+returnn/frontend/audio/specaugment.py,sha256=_GQ1ZypeDa81zigvMXnaA-52vnkR6-cs7ctW_uE8vlM,5849
 returnn/frontend/conversions/__init__.py,sha256=7plsDxWVYhASa-3qmqbdzSI34A9ujUH2iMkL3eRD0TI,84
 returnn/frontend/conversions/espnet_e_branchformer.py,sha256=Mmp3G6nySy0CqeHa-um-RAuUSnFH1DKNjBbqQB_Pomo,9018
 returnn/frontend/conversions/hf_llama.py,sha256=1WQOhQyUWwkAznaRqK2zpThP8XZbaomkaE8qMG_bZPY,9662
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
 returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
 returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
 returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
-returnn-1.…
-returnn-1.…
-returnn-1.…
-returnn-1.…
-returnn-1.…
+returnn-1.20250717.120243.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+returnn-1.20250717.120243.dist-info/METADATA,sha256=GxZbYJvbUiJoMvMKBm2i5UDgmCZf7Fm7NtO_L36oMa0,5215
+returnn-1.20250717.120243.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+returnn-1.20250717.120243.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+returnn-1.20250717.120243.dist-info/RECORD,,
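Each RECORD row has the form path,sha256=digest,size, where the digest is the urlsafe-base64-encoded SHA-256 of the file with the "=" padding stripped, per the wheel spec. A small sketch for computing such a row (hypothetical helper, not part of returnn):

import base64
import hashlib
import os


def record_row(path: str) -> str:
    # "<path>,sha256=<urlsafe b64 digest, no '=' padding>,<size in bytes>"
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    b64 = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return f"{path},sha256={b64},{os.path.getsize(path)}"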
{returnn-1.20250708.165746.dist-info → returnn-1.20250717.120243.dist-info}/LICENSE
File without changes
{returnn-1.20250708.165746.dist-info → returnn-1.20250717.120243.dist-info}/WHEEL
File without changes
{returnn-1.20250708.165746.dist-info → returnn-1.20250717.120243.dist-info}/top_level.txt
File without changes