returnn-1.20250828.142552-py3-none-any.whl → returnn-1.20250830.114445-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of returnn might be problematic.

returnn/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: returnn
- Version: 1.20250828.142552
+ Version: 1.20250830.114445
  Summary: The RWTH extensible training framework for universal recurrent neural networks
  Home-page: https://github.com/rwth-i6/returnn/
  Author: Albert Zeyer
returnn/_setup_info_generated.py CHANGED
@@ -1,2 +1,2 @@
- version = '1.20250828.142552'
- long_version = '1.20250828.142552+git.f81cb9a'
+ version = '1.20250830.114445'
+ long_version = '1.20250830.114445+git.24547d9'
returnn/frontend/array_.py CHANGED
@@ -341,7 +341,9 @@ def window(
  """
  if spatial_dim.need_masking():
  if use_mask is None:
- use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+ use_mask = rf.use_mask_default(
+ default=True, default_false_for_behavior_version_up_to=22, func_name="window"
+ )
  if use_mask:
  source = source.copy_masked(0, dims=[spatial_dim])
  assert window_dim.dimension is not None
@@ -905,7 +907,9 @@ def scatter(
  indices_dim = indices_dim if isinstance(indices_dim, (list, tuple)) else [indices_dim]
  if any(dim.need_masking() for dim in indices_dim):
  if use_mask is None:
- use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+ use_mask = rf.use_mask_default(
+ default=True, default_false_for_behavior_version_up_to=22, func_name="scatter"
+ )
  if use_mask:
  source = source.copy_masked(fill_value, dims=indices_dim)
  else:
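With `func_name` passed here, the masking default for `rf.window` and `rf.scatter` can now be overridden individually from the config, via the `rf_use_mask_{func_name}` lookup added in the `dims.py` hunk further down. A minimal sketch of such a config fragment (RETURNN configs are plain Python files; the specific values are illustrative):

```python
# Hypothetical RETURNN config fragment. The per-function key format
# "rf_use_mask_<func_name>" comes from the use_mask_default() change below.
rf_use_mask_window = True    # keep masking for rf.window
rf_use_mask_scatter = False  # skip masking for rf.scatter
```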
returnn/frontend/conv.py CHANGED
@@ -223,7 +223,7 @@ def conv(
  """
  if any(in_spatial_dim.need_masking() for in_spatial_dim in in_spatial_dims):
  if use_mask is None:
- use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+ use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22, func_name="conv")
  if use_mask:
  source = source.copy_masked(0, dims=in_spatial_dims)
  for in_spatial_dim in in_spatial_dims:
@@ -391,7 +391,9 @@ def transposed_conv(
  """transposed conv"""
  if any(in_spatial_dim.need_masking() for in_spatial_dim in in_spatial_dims):
  if use_mask is None:
- use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+ use_mask = rf.use_mask_default(
+ default=True, default_false_for_behavior_version_up_to=22, func_name="transposed_conv"
+ )
  if use_mask:
  source = source.copy_masked(0, dims=in_spatial_dims)
  if padding == "same" and _any_is_non_default(strides, default=1) and _should_use_consistent_same_padding():
@@ -503,7 +505,7 @@ def pool(

  if any(in_spatial_dim.need_masking() for in_spatial_dim in in_spatial_dims):
  if use_mask is None:
- use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+ use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22, func_name="pool")
  if use_mask:
  source = source.copy_masked({"max": float("-inf"), "avg": 0}[mode], dims=in_spatial_dims)
  else:
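`conv`, `transposed_conv`, and `pool` resolve their default the same way when `use_mask` is not passed; an explicit `use_mask=` argument still bypasses the lookup entirely. A minimal sketch of the resolution call these functions now make internally (mirroring the hunks above; calling it directly outside a training setup is an assumption, in which case the result depends only on the behavior version and the stated default):

```python
import returnn.frontend as rf

# Same default resolution that rf.conv now performs when use_mask is None:
# a config entry wins if present, otherwise behavior version <= 22 gives False,
# otherwise the stated default (True) is used.
use_mask = rf.use_mask_default(
    default=True, default_false_for_behavior_version_up_to=22, func_name="conv"
)
```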
returnn/frontend/dims.py CHANGED
@@ -3,7 +3,7 @@ Utilities for dimension tags, dimensions, axes.
  """

  from __future__ import annotations
- from typing import Optional, Union, TypeVar, Sequence, Tuple
+ from typing import TYPE_CHECKING, Optional, Union, TypeVar, Sequence, Tuple
  from returnn.tensor import Tensor, Dim
  import returnn.frontend as rf
  from ._backend import get_backend_by_tensor, global_backend
@@ -25,6 +25,9 @@ __all__ = [
  "use_mask_default",
  ]

+ if TYPE_CHECKING:
+ from returnn.config import Config
+

  def range_over_dim(dim: Dim, *, dtype: Optional[str] = None, device: Optional[str] = None) -> Tensor[T]:
  """
@@ -309,7 +312,10 @@ def last_frame_position_of_dim(


  def use_mask_default(
- *, default: Optional[bool] = None, default_false_for_behavior_version_up_to: Optional[int] = None
+ *,
+ default: Optional[bool] = None,
+ default_false_for_behavior_version_up_to: Optional[int] = None,
+ func_name: Optional[str] = None,
  ) -> Optional[bool]:
  """
  Check the global RETURNN config for the ``rf_use_mask``
@@ -324,20 +330,20 @@
  and if this is set, and the behavior version is less or equal,
  then return False by default, i.e. do not use the mask by default, if it is not defined in the config.
  This takes precedence over `default`.
+ :param func_name: if specified, also check the config option ``rf_use_mask_{func_name}``
  :return: what to use for the ``use_mask`` argument by default
  """
  from returnn.config import get_global_config

  config = get_global_config(raise_exception=False)
- config_value = None
  if config:
- if "rf_use_mask" in config.typed_dict:
- config_value = config.typed_dict["rf_use_mask"]
- assert config_value is None or isinstance(config_value, bool)
- elif "rf_use_mask" in config.dict:
- config_value = config.bool("rf_use_mask", None)
- if config_value is not None:
- return config_value
+ config_value = _get_opt_bool_from_config(config, "rf_use_mask")
+ if config_value is not None:
+ return config_value
+ if func_name:
+ config_value = _get_opt_bool_from_config(config, f"rf_use_mask_{func_name}")
+ if config_value is not None:
+ return config_value

  if default_false_for_behavior_version_up_to is not None:
  from returnn.util.basic import BehaviorVersion
@@ -345,3 +351,13 @@
  if BehaviorVersion.get() <= default_false_for_behavior_version_up_to:
  return False
  return default
+
+
+ def _get_opt_bool_from_config(config: Config, key: str) -> Optional[bool]:
+ if key in config.typed_dict:
+ config_value = config.typed_dict[key]
+ assert config_value is None or isinstance(config_value, bool)
+ return config_value
+ elif key in config.dict:
+ return config.bool(key, None)
+ return None
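Read together, the lookup order in `use_mask_default` is: a non-None global `rf_use_mask` config entry first, then the new per-function `rf_use_mask_{func_name}` entry, then the behavior-version fallback, and finally the `default` argument. Note that a global `rf_use_mask` set to `True` or `False` shadows any per-function key, so per-function overrides only take effect while the global key is unset (or `None`). A hedged config sketch (key names per the code above; the concrete values and behavior version are illustrative):

```python
# Hypothetical RETURNN config fragment illustrating the precedence implemented above.
# The global "rf_use_mask" key is deliberately left unset so the per-function keys apply.
behavior_version = 23  # > 22, so functions without an override fall back to True

rf_use_mask_conv = False   # resolved via use_mask_default(..., func_name="conv")
rf_use_mask_window = True  # resolved via use_mask_default(..., func_name="window")
```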
returnn/frontend/hooks.py CHANGED
@@ -16,7 +16,7 @@ T = TypeVar("T")


  def setup_post_hook_on_method(
- obj: Any,
+ obj: T,
  attr: str,
  hook: Callable[[T, Tuple[Any, ...], Dict[str, Any], Any], Optional[Any]],
  *,
@@ -40,7 +40,7 @@ class MethodWithHooks:
  """

  @classmethod
- def get(cls, obj: Any, attr: str) -> MethodWithHooks:
+ def get(cls, obj: T, attr: str) -> MethodWithHooks:
  """get existing or init new :class:`MethodWithHooks`"""
  method = getattr(obj, attr)
  if not isinstance(method, MethodWithHooks):
@@ -56,7 +56,7 @@ class MethodWithHooks:
  method.setup()
  return method

- def __init__(self, obj: Any, attr: str):
+ def __init__(self, obj: T, attr: str):
  """
  :param obj:
  :param attr:
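The hooks API itself is unchanged here; `obj` just moves from `Any` to the `TypeVar` `T`, so the hook callable's first argument is typed consistently with the hooked object. A minimal usage sketch, assuming the post-hook semantics suggested by the annotation above (a callable receiving `(obj, args, kwargs, result)` whose optional return value may replace the result); the example class and method are hypothetical:

```python
from typing import Any, Dict, Optional, Tuple

from returnn.frontend.hooks import setup_post_hook_on_method


class Accumulator:
    """Hypothetical stand-in; any object with a regular method can be hooked."""

    def __init__(self):
        self.total = 0.0

    def add(self, x: float) -> float:
        self.total += x
        return self.total


def log_add(obj: Accumulator, args: Tuple[Any, ...], kwargs: Dict[str, Any], result: Any) -> Optional[Any]:
    # Observe the call; returning None is assumed to leave the original result untouched.
    print(f"add called with args={args}, total now {result}")
    return None


acc = Accumulator()
setup_post_hook_on_method(acc, "add", log_add)
acc.add(2.0)  # the hook prints; the original return value is assumed to pass through
```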
returnn/frontend/nested.py CHANGED
@@ -115,7 +115,7 @@ def _mask(
  return s
  if not allow_dim_extension or mask_value is None or (isinstance(mask_value, (int, float)) and mask_value == 0):
  if mask.dims_set.issubset(s.dims_set):
- return rf.where(mask, s, mask_value)
+ return rf.where(mask, s, rf.cast(mask_value, s.dtype))
  assert not mask.dims_set.intersection(s.dims_set) # not sure...
  return s
  assert isinstance(mask_value, (int, float, Tensor))
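The added `rf.cast` guards against a dtype mismatch between the tensor being masked and a plain Python fill value (e.g. filling an integer tensor with a scalar that some backends would otherwise promote or reject in the `where`), by casting the fill value to `s.dtype` first. A minimal sketch of the pattern, using only calls that appear elsewhere in this diff; the dim/tensor setup and backend selection are illustrative assumptions:

```python
import returnn.frontend as rf
from returnn.tensor import Dim

rf.select_backend_torch()  # assumes the Torch backend is available

time_dim = Dim(5, name="time")
s = rf.cast(rf.range_over_dim(time_dim), "int32")  # some int32 tensor over time_dim
mask = rf.range_over_dim(time_dim) < 3             # boolean mask over the same dim
out = rf.where(mask, s, rf.cast(0, s.dtype))       # fill value cast to s.dtype first
```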
returnn/frontend/normalization.py CHANGED
@@ -218,7 +218,7 @@ class BatchNorm(rf.Module):

  if any(d.need_masking() for d in source.dims if d != self.in_dim):
  if self.use_mask is None:
- use_mask = rf.use_mask_default(default=True)
+ use_mask = rf.use_mask_default(default=True, func_name="BatchNorm")
  else:
  use_mask = self.use_mask
  else:
returnn/frontend/signal.py CHANGED
@@ -71,7 +71,7 @@ def stft(
  """
  if in_spatial_dim.need_masking():
  if use_mask is None:
- use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+ use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22, func_name="stft")
  if use_mask:
  x = x.copy_masked(0, dims=[in_spatial_dim])
  fft_length = fft_length or frame_length
returnn-1.20250828.142552.dist-info/METADATA → returnn-1.20250830.114445.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: returnn
- Version: 1.20250828.142552
+ Version: 1.20250830.114445
  Summary: The RWTH extensible training framework for universal recurrent neural networks
  Home-page: https://github.com/rwth-i6/returnn/
  Author: Albert Zeyer
returnn-1.20250828.142552.dist-info/RECORD → returnn-1.20250830.114445.dist-info/RECORD
@@ -1,9 +1,9 @@
- returnn/PKG-INFO,sha256=Uvf8zgSBctl_SphH-v2ikfVE9N-jlwEAGCiUPtlx8iY,5215
+ returnn/PKG-INFO,sha256=PZOQvfJJKkUgAsvh_nvFeAdQOFCWpBuj2T4euwap8VA,5215
  returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
  returnn/__main__.py,sha256=lHyZcu_0yc9f7Vf_Kfdy9PmeU0T76XVXnpalHi5WKro,31740
  returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
  returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
- returnn/_setup_info_generated.py,sha256=QXZpIuHEV8wkbvcOm3273IFMbpbj15j4l3g_UoApY08,77
+ returnn/_setup_info_generated.py,sha256=ukzF4nRM4yknegwGHq4ktCyLc7tP9fasq5bpzDq0Tvg,77
  returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
  returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
  returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -80,7 +80,7 @@ returnn/frontend/_cache.py,sha256=Uao2xzfvVaKABk1fkxcpXzxKIGJaI9FwwlTvvoNUstk,85
  returnn/frontend/_numpy_backend.py,sha256=fZjks7p3dgxVZ6tSDazTTgBxNjJqXjfqgw_7mA7rDEE,9066
  returnn/frontend/_random_journal.py,sha256=_ktP_mjgx8vtQQGX_DofdhewJj0aPiczefTWeemPkmo,5457
  returnn/frontend/_utils.py,sha256=uVQldGHyYKIyhSEmumJ04ix5eP5tjZw4CEC0w6-zhyQ,12074
- returnn/frontend/array_.py,sha256=Up5cB5kPaBgGToDdnyhJ2KsoMWKIdmuRdXq6OdBLzi4,53754
+ returnn/frontend/array_.py,sha256=7uX5-Os2OyYUfC5soprIUx7rr-371yKf9DcckRKONXY,53855
  returnn/frontend/attention.py,sha256=GKt-Xqnz8sIyXVrE0i4VCS7J2Wu7dmoH_BA0Cu8CrXQ,45769
  returnn/frontend/backend.py,sha256=iQ9w4xl8Ea7bgpb0VUaCKq50rV5Bl2E5J8Rhd-oqD_c,883
  returnn/frontend/build_from_dict.py,sha256=rfWa2rjjhIR_kIQED_nMrygrQBunS6unegzWTLVbC98,3017
@@ -88,14 +88,14 @@ returnn/frontend/cond.py,sha256=gh6wg0aSbAJQfKRv4BQAu-EfPWtWPLFjgc8IaPPFmwg,1023
  returnn/frontend/const.py,sha256=A5fP9w6Akv56d89pPvdoZaXvC9ZTYcexepnS9O2clOc,3945
  returnn/frontend/container.py,sha256=wF3OlQN7WlOVmmdapUth_Unha3DVf6h1B7okBJAuJDA,8011
  returnn/frontend/control_flow_ctx.py,sha256=v17CsNwRnZYe8GdMtGJt2ftibfxMCGK1i0l-GX5ILu0,699
- returnn/frontend/conv.py,sha256=lca3hG0FO2IEOoe5OvOnm9NU2Ofx_RPqnCxZqxocUdM,32079
+ returnn/frontend/conv.py,sha256=4Mrq7MFc0f7SJ8g-wJEv4Lg3Stmju-fMwD09qKv6CuQ,32174
  returnn/frontend/device.py,sha256=Sjara0EmFLhu9O55cN_p6OwU0NgdNCCQjyAuQhiWpGw,1437
- returnn/frontend/dims.py,sha256=aH5FQ_m0xMD6Rj-BUWGx8lB-HkCuwZfMBf6mZbGGW5E,12611
+ returnn/frontend/dims.py,sha256=_HDU-Kxn3pApicFkm0F4Fs-ZAuF1gKXG8rroQHCFQQI,13073
  returnn/frontend/dropout.py,sha256=TjqZCKDIOBeHr14-NCemOm9m3p84LxQuPH1DvRAYg88,5028
  returnn/frontend/dtype.py,sha256=Ooc5BrcNrTp6XShuFEV9g5V6-niuy4ImP_Lt_Qgq3jE,1886
  returnn/frontend/gradient.py,sha256=G-Qv4gKGHYEeB92Zwco9ao4qjd6umZPUzQC4J-fbYWo,4033
  returnn/frontend/graph.py,sha256=PIv901WZ1rfTV0QGkyzBv6UxfWk9NsLGxdoJ5x9-8Xg,1818
- returnn/frontend/hooks.py,sha256=jYPbsb4gy5HORRZvKTEJbLcoJri5hOt5ADbhnTCytQo,5507
+ returnn/frontend/hooks.py,sha256=L7ITrlEQ6JUy8fEBE0SXg1dzFNkLrgb8gxZm88fxryU,5501
  returnn/frontend/init.py,sha256=bVB7bpghaY8DI_HL0mkB_9z95onWnIX2zlW4hlMYnRw,7494
  returnn/frontend/label_smoothing.py,sha256=lxmaowNr61sCMzMewqHhu1r0CcklYfhLXlFnBu8DeAU,5676
  returnn/frontend/linear.py,sha256=xRUjnkD3MTWDezSaYATBYJQ2fa1RhKMNrTuhC54hhVs,2252
@@ -104,8 +104,8 @@ returnn/frontend/loss.py,sha256=uSvou2MPd13JiLAg_OIQ3AyyLvD3RHjMEVgFEN0gKqU,7440
  returnn/frontend/math_.py,sha256=A_RkZ5lH2uXMchfPIH3itraWtMNNCVckQHHpf7aIIZQ,17295
  returnn/frontend/matmul.py,sha256=xkueyxzSDz8MsYaWxPSjmV2Yy-tcaiOQDXbFt1IQM2A,1944
  returnn/frontend/module.py,sha256=219rh5mE0CD0-NdxXLsKyhv3BNtOI9jSyiI1Rb8MOyU,10700
- returnn/frontend/nested.py,sha256=P84u_cjoYdYRJ_0Cbt0vlKXxskmXTDfsnw_vFCCNKtU,15107
- returnn/frontend/normalization.py,sha256=-lYJ9IWcheOQu1gXJehSOA76qgVtxd1C07Jqps6Qg1o,14116
+ returnn/frontend/nested.py,sha256=6Nbe3pPIuSziI33Pi_qis0vwHjuVUNXDmedqfzIBfOU,15125
+ returnn/frontend/normalization.py,sha256=NrIIaZ3c2yf-WH2R9lPaL2TAq4IcNQc4OE5kFYdoihw,14139
  returnn/frontend/parameter.py,sha256=zvrkhSYC1c_O9kVwgHvOtOnWNurl5J28lkS0i1LQpWU,10627
  returnn/frontend/parametrizations.py,sha256=ptNgBw5IiPXVpB3QGse7AGAhdXp8X1rCqYUl2Mae8aI,2876
  returnn/frontend/parametrize.py,sha256=VhgTEP7ehON950Q4bkCy8rvg9641moEKAXn0XzomK6E,7216
@@ -114,7 +114,7 @@ returnn/frontend/rand.py,sha256=2x7AHSYH_tZkzTk_q3t3GA_yYRNeKsVbJjw2InqSGDk,1354
  returnn/frontend/rec.py,sha256=6YSsSG7fdtfvvg24vmexSg8R2aVCcKHBdGLh-Mgn9Co,8037
  returnn/frontend/reduce.py,sha256=gRSvBJZNHa757IqBxGw4hu5eiO3pjie_ptEwUXHLSCs,10340
  returnn/frontend/run_ctx.py,sha256=yyOMUCKTOe19C4z2Nfly4YCLBmQ9ihip6nGrkW-Y6qg,23789
- returnn/frontend/signal.py,sha256=hfDipDhO0n9nXhGy7txwYUNbvg28NqkFq9p0Jq46f9c,4411
+ returnn/frontend/signal.py,sha256=iBRO2ywpJOjIUfVveJaqX4NT59013VCoE49IHkVn6p8,4429
  returnn/frontend/state.py,sha256=EePdrx6PtWL4mJ2XZmGlh5dl4nq6G9wZpqP4hdDEzfY,2935
  returnn/frontend/stepwise_scheduler.py,sha256=fMOTR7npGCDXrXDmSQ4VwmudoHEbY3Yr-QGyjFdQJSc,927
  returnn/frontend/tensor_array.py,sha256=Ej7CHtvpY0yBROlAk5vFe3CTXh-iAuqu9qcXS3Qxt2I,4328
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
  returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
  returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
  returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
- returnn-1.20250828.142552.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
- returnn-1.20250828.142552.dist-info/METADATA,sha256=Uvf8zgSBctl_SphH-v2ikfVE9N-jlwEAGCiUPtlx8iY,5215
- returnn-1.20250828.142552.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
- returnn-1.20250828.142552.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
- returnn-1.20250828.142552.dist-info/RECORD,,
+ returnn-1.20250830.114445.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+ returnn-1.20250830.114445.dist-info/METADATA,sha256=PZOQvfJJKkUgAsvh_nvFeAdQOFCWpBuj2T4euwap8VA,5215
+ returnn-1.20250830.114445.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+ returnn-1.20250830.114445.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+ returnn-1.20250830.114445.dist-info/RECORD,,