returnn-1.20250226.115259-py3-none-any.whl → returnn-1.20250226.132109-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of returnn has been flagged as possibly problematic.

returnn/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: returnn
-Version: 1.20250226.115259
+Version: 1.20250226.132109
 Summary: The RWTH extensible training framework for universal recurrent neural networks
 Home-page: https://github.com/rwth-i6/returnn/
 Author: Albert Zeyer

returnn/_setup_info_generated.py CHANGED
@@ -1,2 +1,2 @@
-version = '1.20250226.115259'
-long_version = '1.20250226.115259+git.0d32534'
+version = '1.20250226.132109'
+long_version = '1.20250226.132109+git.7dbfd3a'

returnn/frontend/array_.py CHANGED
@@ -325,6 +325,7 @@ def window(
     padding: str = "same",
     pad_value: Optional[Union[int, float]] = None,
     stride: int = 1,
+    use_mask: Optional[bool] = None,
 ) -> Tuple[Tensor, Dim]:
     """
     Follows the same idea as RETURNN tf_util.windowed,
@@ -338,8 +339,14 @@ def window(
     :param padding: "same" or "valid"
     :param pad_value:
     :param stride:
+    :param use_mask: whether we should mask to make sure the zero padding is correct
     :return: out, out_spatial_dim
     """
+    if spatial_dim.need_masking():
+        if use_mask is None:
+            use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+        if use_mask:
+            source = source.copy_masked(0, dims=[spatial_dim])
     assert window_dim.dimension is not None
     if padding == "same":
         out_spatial_dim = spatial_dim
@@ -784,6 +791,7 @@ def scatter(
     mode: str = "sum",
     fill_value: Optional[Union[int, float]] = None,
     out_dim: Optional[Union[Dim, Sequence[Dim]]] = None,
+    use_mask: Optional[bool] = None,
 ) -> Tensor:
     """
     Scatters into new zero-tensor.
@@ -806,6 +814,7 @@ def scatter(
         If not given, will be automatically determined as the sparse_dim from indices.
         If multiple out dims, use indices into the merged out dims,
         and then we use :func:`rf.split_dims` afterwards.
+    :param use_mask:
     :return: [batch_dims..., out_dim(s)..., feature_dims...]
     """
     if mode == "logsumexp":
@@ -826,24 +835,32 @@ def scatter(
         fill_value = 0
     elif mode == "max":
         if "int" in source.dtype:
-            import numpy
-
             fill_value = numpy.iinfo(source.raw_tensor.dtype).min
         else:
             fill_value = float("-inf")
     elif mode == "min":
         if "int" in source.dtype:
-            import numpy
-
             fill_value = numpy.iinfo(source.raw_tensor.dtype).max
         else:
             fill_value = float("inf")
     else:
         raise ValueError(f"scatter: invalid mode {mode!r}")
+    indices_dim = indices_dim if isinstance(indices_dim, (list, tuple)) else [indices_dim]
+    if any(dim.need_masking() for dim in indices_dim):
+        if use_mask is None:
+            use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+        if use_mask:
+            source = source.copy_masked(fill_value, dims=indices_dim)
+    else:
+        use_mask = False
     # noinspection PyProtectedMember
-    return source._raw_backend.scatter(
+    out = source._raw_backend.scatter(
         source, indices=indices, indices_dim=indices_dim, mode=mode, fill_value=fill_value, out_dim=out_dim
     )
+    if use_mask and mode != "sum":
+        # Make sure we don't leave any infinities in the output.
+        out = out.copy_masked(0, dims=[out_dim])
+    return out


 def scatter_argmax(
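
Why `source` is masked with `fill_value` before the backend scatter: padded positions along `indices_dim` still carry values and indices, and would otherwise enter the reduction. A standalone numpy sketch of the idea, not the RETURNN implementation (the arrays and the real length of 2 are made up for illustration):

    import numpy

    # Values along a padded indices dim: real length 2, padded to 4.
    source = numpy.array([3.0, 7.0, 9.0, 9.0])  # last two entries are padding
    indices = numpy.array([0, 1, 1, 1])         # padding carries indices too
    out = numpy.full(2, -numpy.inf)             # scatter-max target

    # Mask the padded source entries with the fill value (-inf for mode="max"),
    # mirroring `source = source.copy_masked(fill_value, dims=indices_dim)`:
    source[2:] = -numpy.inf

    for i, idx in enumerate(indices):
        out[idx] = max(out[idx], source[i])
    # out == [3.0, 7.0]; without masking, the padded 9.0 entries would have won.
    # The final `out.copy_masked(0, ...)` step then clears any remaining -inf.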
returnn/frontend/conv.py CHANGED
@@ -34,8 +34,8 @@ class _ConvOrTransposedConv(rf.Module):
     Base class for both convolution and transposed convolution.
     """

-    nd: Optional[int] = None
-    _transposed: bool
+    nd: Optional[int] = None  # set in the subclasses, e.g. 1 for Conv1d, etc
+    _transposed: bool  # set in the subclasses _Conv or _TransposedConv
     groups: Optional[int] = None

     def __init__(
@@ -187,8 +187,14 @@ def conv(
     dilation_rate: Optional[Union[int, Sequence[int]]] = None,
     groups: Optional[int] = None,
     bias: Optional[Tensor] = None,
+    use_mask: Optional[bool] = None,
 ) -> Tuple[Tensor, Sequence[Dim]]:
     """convolution"""
+    if any(in_spatial_dim.need_masking() for in_spatial_dim in in_spatial_dims):
+        if use_mask is None:
+            use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+        if use_mask:
+            source = source.copy_masked(0, dims=in_spatial_dims)
     for in_spatial_dim in in_spatial_dims:
         if in_spatial_dim not in source.dims:
             raise ValueError(f"conv: source {source} does not have spatial dim {in_spatial_dim}")
@@ -345,8 +351,14 @@ def transposed_conv(
     output_padding: Optional[Union[Sequence[Optional[int]], int]] = None,
     strides: Optional[Sequence[int]] = None,
     bias: Optional[Tensor] = None,
+    use_mask: Optional[bool] = None,
 ) -> Tuple[Tensor, Sequence[Dim]]:
     """transposed conv"""
+    if any(in_spatial_dim.need_masking() for in_spatial_dim in in_spatial_dims):
+        if use_mask is None:
+            use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+        if use_mask:
+            source = source.copy_masked(0, dims=in_spatial_dims)
     # noinspection PyProtectedMember
     out, out_spatial_dims = source._raw_backend.transposed_conv(
         source=source,
@@ -394,6 +406,7 @@ class TransposedConv3d(_TransposedConv):
 def pool(
     source: Tensor,
     *,
+    nd: Optional[int] = None,
     mode: str,
     pool_size: Union[Sequence[int], int],
     padding: str = "valid",
@@ -401,22 +414,23 @@ def pool(
     strides: Optional[Union[Sequence[int], int]] = None,
     in_spatial_dims: Union[Sequence[Dim], Dim],
     out_spatial_dims: Optional[Union[Sequence[Dim], Dim]] = None,
-    nd: Optional[int] = None,
+    use_mask: Optional[bool] = None,
 ) -> Tuple[Tensor, Sequence[Dim]]:
     """
     A generic N-D pooling layer.
     This would usually be done after a convolution for down-sampling.

-    :param Tensor source:
+    :param source:
     :param nd:
-    :param str mode: "max" or "avg"
-    :param tuple[int] pool_size: shape of the window of each reduce
-    :param str padding: "valid" or "same"
-    :param tuple[int]|int dilation_rate:
-    :param tuple[int]|int|None strides: in contrast to tf.nn.pool, the default (if it is None) will be set to pool_size
-    :param Sequence[Dim] in_spatial_dims:
-    :param Sequence[Dim]|None out_spatial_dims:
-    :return: layer, out_spatial_dims
+    :param mode: "max" or "avg"
+    :param pool_size: shape of the window of each reduce
+    :param padding: "valid" or "same"
+    :param dilation_rate:
+    :param strides: in contrast to tf.nn.pool, the default (if it is None) will be set to pool_size
+    :param in_spatial_dims:
+    :param out_spatial_dims:
+    :param use_mask:
+    :return: out, out_spatial_dims
     """
     if isinstance(in_spatial_dims, Dim):
         in_spatial_dims = [in_spatial_dims]
@@ -440,6 +454,14 @@ def pool(
     assert isinstance(strides, (list, tuple))
     assert len(strides) == nd

+    if any(in_spatial_dim.need_masking() for in_spatial_dim in in_spatial_dims):
+        if use_mask is None:
+            use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+        if use_mask:
+            source = source.copy_masked({"max": float("-inf"), "avg": 0}[mode], dims=in_spatial_dims)
+    else:
+        use_mask = False
+
     # noinspection PyProtectedMember
     out, out_spatial_dims = source._raw_backend.pool(
         source=source,
@@ -451,6 +473,10 @@ def pool(
         in_spatial_dims=in_spatial_dims,
         out_spatial_dims=out_spatial_dims,
     )
+    if use_mask and mode == "max":
+        # We masked with -inf for max-pooling to get correct pooling at the boundaries.
+        # However, the resulting tensor might have -inf in it, and it is better to mask it out.
+        out = out.copy_masked(0, dims=out_spatial_dims)
     return out, out_spatial_dims
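
The -inf fill before max-pooling exists so that padded frames cannot win a window that straddles the sequence boundary. A self-contained numpy illustration of the effect (values and lengths are made up; this is not the RETURNN backend code):

    import numpy

    seq = numpy.array([-1.0, -5.0, -2.0, 0.0, 0.0])  # real length 3, zero-padded
    real_len, window, stride = 3, 2, 2

    # Naive max-pooling lets the zero padding win over negative real values:
    naive = [seq[i:i + window].max() for i in range(0, len(seq) - window + 1, stride)]
    # naive == [-1.0, 0.0]  <- second window [-2.0, 0.0] is wrong

    masked = seq.copy()
    masked[real_len:] = -numpy.inf  # like source.copy_masked(float("-inf"), ...)
    pooled = [masked[i:i + window].max() for i in range(0, len(seq) - window + 1, stride)]
    # pooled == [-1.0, -2.0]  <- padding can no longer win.
    # Fully padded output frames would be -inf, hence out.copy_masked(0, ...) afterwards.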
returnn/frontend/dims.py CHANGED
@@ -22,6 +22,7 @@ __all__ = [
     "num_elements_of_shape",
     "masked_fraction_of_shape",
     "last_frame_position_of_dim",
+    "use_mask_default",
 ]


@@ -305,3 +306,42 @@ def last_frame_position_of_dim(
     pos = rf.maximum(pos, 0)
     pos.sparse_dim = dim
     return pos
+
+
+def use_mask_default(
+    *, default: Optional[bool] = None, default_false_for_behavior_version_up_to: Optional[int] = None
+) -> Optional[bool]:
+    """
+    Check the global RETURNN config option ``rf_use_mask`` to determine
+    what default we should use for the ``use_mask`` argument in various functions
+    (e.g. :func:`conv`, :func:`pool`, :func:`reduce`, :func:`matmul`, ...).
+
+    See issue `#1691 <https://github.com/rwth-i6/returnn/issues/1691>`__.
+
+    :param default: what to return if it is not defined in the config,
+        and ``default_false_for_behavior_version_up_to`` does not apply.
+    :param default_false_for_behavior_version_up_to: if it is not defined in the config,
+        and if this is set, and the behavior version is less or equal,
+        then return False by default, i.e. do not use the mask by default.
+        This takes precedence over ``default``.
+    :return: what to use for the ``use_mask`` argument by default
+    """
+    from returnn.config import get_global_config
+
+    config = get_global_config(raise_exception=False)
+    config_value = None
+    if config:
+        if "rf_use_mask" in config.typed_dict:
+            config_value = config.typed_dict["rf_use_mask"]
+            assert config_value is None or isinstance(config_value, bool)
+        elif "rf_use_mask" in config.dict:
+            config_value = config.bool("rf_use_mask", None)
+    if config_value is not None:
+        return config_value
+
+    if default_false_for_behavior_version_up_to is not None:
+        from returnn.util.basic import BehaviorVersion
+
+        if BehaviorVersion.get() <= default_false_for_behavior_version_up_to:
+            return False
+    return default
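
For reference, the resolution order that use_mask_default implements, as a standalone runnable sketch (resolve_use_mask, config_value, and behavior_version are hypothetical stand-ins for the global config lookup and BehaviorVersion.get()):

    def resolve_use_mask(config_value, behavior_version, *, default=None, default_false_up_to=None):
        # 1. An explicit rf_use_mask value from the config always wins.
        if config_value is not None:
            return config_value
        # 2. With an old enough behavior version, keep the old unmasked behavior.
        if default_false_up_to is not None and behavior_version <= default_false_up_to:
            return False
        # 3. Otherwise fall back to the given default.
        return default

    assert resolve_use_mask(None, 22, default=True, default_false_up_to=22) is False
    assert resolve_use_mask(None, 23, default=True, default_false_up_to=22) is True
    assert resolve_use_mask(False, 23, default=True, default_false_up_to=22) is False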

returnn/frontend/normalization.py CHANGED
@@ -218,10 +218,9 @@ class BatchNorm(rf.Module):

         if any(d.need_masking() for d in source.dims if d != self.in_dim):
             if self.use_mask is None:
-                raise ValueError(
-                    f"{self}: use_mask must be specified if the input {source} has any dynamic spatial dims"
-                )
-            use_mask = self.use_mask
+                use_mask = rf.use_mask_default(default=True)
+            else:
+                use_mask = self.use_mask
         else:
             use_mask = False  # not needed. False because this potentially enables an efficient fused op.

returnn/frontend/signal.py CHANGED
@@ -6,6 +6,7 @@ stft etc
 from __future__ import annotations
 from typing import Optional, Tuple
 from returnn.tensor import Tensor, Dim
+import returnn.frontend as rf


 __all__ = ["stft"]
@@ -23,6 +24,7 @@ def stft(
     window_enforce_even: bool = True,
     out_spatial_dim: Optional[Dim] = None,
     out_dim: Optional[Dim] = None,
+    use_mask: Optional[bool] = None,
 ) -> Tuple[Tensor, Dim, Dim]:
     """
     Calculate the short-time Fourier transform (STFT) of a signal.
@@ -65,8 +67,14 @@ def stft(
         but in most other frameworks, the behavior matches to window_enforce_even=False.
     :param out_spatial_dim:
     :param out_dim:
+    :param use_mask:
     :return: (stft, out_spatial_dim, out_dim)
     """
+    if in_spatial_dim.need_masking():
+        if use_mask is None:
+            use_mask = rf.use_mask_default(default=True, default_false_for_behavior_version_up_to=22)
+        if use_mask:
+            x = x.copy_masked(0, dims=[in_spatial_dim])
     fft_length = fft_length or frame_length
     if out_dim is None:
         out_dim = Dim(fft_length // 2 + 1, name="stft-freq")
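
Zeroing the padded samples before the STFT matters because analysis frames that overlap the padding would otherwise mix whatever sits in the padded region into the spectrum. A self-contained numpy illustration (signal values, lengths, and frame parameters are made up):

    import numpy

    x = numpy.array([0.1, 0.4, -0.2, 0.3, 0.0, 0.5, 9.9, 9.9])  # real length 6
    real_len, frame_length, frame_step = 6, 4, 2

    # Corresponds to `x = x.copy_masked(0, dims=[in_spatial_dim])` above:
    x[real_len:] = 0.0

    # Frames overlapping position >= real_len now see zeros, not stale values:
    frames = [x[i:i + frame_length] for i in range(0, len(x) - frame_length + 1, frame_step)]
    spectra = [numpy.fft.rfft(f) for f in frames]  # one rfft per frame, as in an STFT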
returnn/util/basic.py CHANGED
@@ -219,7 +219,7 @@ class BehaviorVersion:
     See :ref:`behavior_version`.
     """

-    _latest_behavior_version = 22
+    _latest_behavior_version = 23
     _behavior_version = None  # type: typing.Optional[int]
     _min_behavior_version = 0  # type: int
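
The bump to behavior version 23 ties into the hunks above: with the new version, masking becomes the default (via `default_false_for_behavior_version_up_to=22`). In a RETURNN config (a plain Python file), opting in looks roughly like this sketch (only the relevant options shown):

    # RETURNN config sketch:
    behavior_version = 23  # opt in to masked-by-default conv/pool/stft/scatter

    # Optionally override the default globally; this is the new `rf_use_mask`
    # option read by rf.use_mask_default():
    # rf_use_mask = False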

returnn-1.20250226.132109.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: returnn
-Version: 1.20250226.115259
+Version: 1.20250226.132109
 Summary: The RWTH extensible training framework for universal recurrent neural networks
 Home-page: https://github.com/rwth-i6/returnn/
 Author: Albert Zeyer

returnn-1.20250226.132109.dist-info/RECORD CHANGED
@@ -1,9 +1,9 @@
-returnn/PKG-INFO,sha256=2Ws--V5aicc3WJ-I6OrqPbbrvVNTH3Cnno6L7yeIyKY,5215
+returnn/PKG-INFO,sha256=1tdi71kdx2tM5iCiEhtBkyM0jO74aiySG7bvbniZgz0,5215
 returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
 returnn/__main__.py,sha256=qBFbuB1yN3adgVM5pXt2-Yq9vorjRNchNPL8kDKx44M,31752
 returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
 returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
-returnn/_setup_info_generated.py,sha256=3ur2a8rg2h6MJe2vAo7Tq4axfkV1GYJMcaQdnsmshb8,77
+returnn/_setup_info_generated.py,sha256=GFgn-7LeKnvijLbLCBASDWIfBb96hZ17jlTud_Q9xAM,77
 returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
 returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
 returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -80,7 +80,7 @@ returnn/frontend/_cache.py,sha256=JAhi7L-raQ3A-NC3JUYDtdRTwT3BGJJGGZxrZ8MfEWQ,84
 returnn/frontend/_numpy_backend.py,sha256=2oCtG0YCWL_89v4cD_jDj8em1O_Fp-_YWl5EblGi_yo,7858
 returnn/frontend/_random_journal.py,sha256=_ktP_mjgx8vtQQGX_DofdhewJj0aPiczefTWeemPkmo,5457
 returnn/frontend/_utils.py,sha256=4A3MSRM0i86J77550uR_AjcBEPu6nymLUZ9Xd1V3Fkc,12073
-returnn/frontend/array_.py,sha256=M5vCeH0nlwJ-zrdjbZpsMLN6StOLn0iM7PnXvGLLE3g,49154
+returnn/frontend/array_.py,sha256=eYwH-NVAoHpVrFdJv08lCqh3jvfoZV_ZBEoWHjsBz0o,50090
 returnn/frontend/attention.py,sha256=GKt-Xqnz8sIyXVrE0i4VCS7J2Wu7dmoH_BA0Cu8CrXQ,45769
 returnn/frontend/backend.py,sha256=iQ9w4xl8Ea7bgpb0VUaCKq50rV5Bl2E5J8Rhd-oqD_c,883
 returnn/frontend/build_from_dict.py,sha256=rfWa2rjjhIR_kIQED_nMrygrQBunS6unegzWTLVbC98,3017
@@ -88,9 +88,9 @@ returnn/frontend/cond.py,sha256=gh6wg0aSbAJQfKRv4BQAu-EfPWtWPLFjgc8IaPPFmwg,1023
 returnn/frontend/const.py,sha256=bL51HXxq858dWmrKd61k8tWBWIe67jVf9pj1wZcZZAo,3945
 returnn/frontend/container.py,sha256=wF3OlQN7WlOVmmdapUth_Unha3DVf6h1B7okBJAuJDA,8011
 returnn/frontend/control_flow_ctx.py,sha256=v17CsNwRnZYe8GdMtGJt2ftibfxMCGK1i0l-GX5ILu0,699
-returnn/frontend/conv.py,sha256=51LZovcRzITDLXvPcJs_MFsGEY_MFvO_MFF9D-jZstA,22481
+returnn/frontend/conv.py,sha256=p4R6j40GCvVrw3kbQQJtfxY6tfIR8Rb3tIzwAtiLuec,23858
 returnn/frontend/device.py,sha256=K7Y1qoQcO4GIHgLkPLQWK-GVT8gKL8GwyQrmPo8LgBE,1438
-returnn/frontend/dims.py,sha256=hKA7IQRB0DbohN1ngNw31W44BsyjdHCtYAccxOcumzQ,10872
+returnn/frontend/dims.py,sha256=aH5FQ_m0xMD6Rj-BUWGx8lB-HkCuwZfMBf6mZbGGW5E,12611
 returnn/frontend/dropout.py,sha256=rsx3p5b0NblBfXXSQZTQFJ8jUUS3fj4Qzc39iffBMCA,5006
 returnn/frontend/dtype.py,sha256=Ooc5BrcNrTp6XShuFEV9g5V6-niuy4ImP_Lt_Qgq3jE,1886
 returnn/frontend/gradient.py,sha256=G-Qv4gKGHYEeB92Zwco9ao4qjd6umZPUzQC4J-fbYWo,4033
@@ -105,7 +105,7 @@ returnn/frontend/math_.py,sha256=KlJxdIib8ENlid7cc4lcwHv5e21tzTjTEV8VgEDAijo,169
 returnn/frontend/matmul.py,sha256=3QaGiZtSs9PriT40T7Vc3KnYKPgYSN4tCZytYeq9qMA,1945
 returnn/frontend/module.py,sha256=219rh5mE0CD0-NdxXLsKyhv3BNtOI9jSyiI1Rb8MOyU,10700
 returnn/frontend/nested.py,sha256=CT3C0wXkeWGjJcAoF6yebsRXuN8-YpjO2eqgdl1-vaE,11005
-returnn/frontend/normalization.py,sha256=fvkrMB0xlBeeagFHDGnZ7SvgpJ2TBirfhq3u0RGJFHw,14139
+returnn/frontend/normalization.py,sha256=QIjXYg0C8BD2g_1lAkVO4Cara729uHC_bsQh99VsWeI,14061
 returnn/frontend/parameter.py,sha256=w6SN-uv87OyeWBt90_3UBbK0h6sftSOCxkqXPg76caY,10375
 returnn/frontend/parametrizations.py,sha256=hVbOlgm1pQAmZnAnNxq8Tk23rykr_iy3-6R1H6CwlMA,2798
 returnn/frontend/parametrize.py,sha256=VhgTEP7ehON950Q4bkCy8rvg9641moEKAXn0XzomK6E,7216
@@ -114,7 +114,7 @@ returnn/frontend/rand.py,sha256=Levgf5VtOOBKDSgz0869Jf3VW4BWxYZuRXsa_fOxNI4,1296
 returnn/frontend/rec.py,sha256=4m20LvsPJ75pRYykVrup6Csj_D7duG-dW28SaJh-sq8,7863
 returnn/frontend/reduce.py,sha256=-Zt-OH6Zbtb9uR6YEzurCyrowH-anIXvuga6Pla2V70,10220
 returnn/frontend/run_ctx.py,sha256=ItcZwuFItkZjYWrg715L1Za2Xg7__MQCrRCAwBeTUxA,21411
-returnn/frontend/signal.py,sha256=8miFjBropGt2xQNwjLcSd9VaiKc04nHo04iPfEwdB8M,4078
+returnn/frontend/signal.py,sha256=XgOBL1iy-cJgulePH5HRPAwp2cScy60q4RItr7xzvGc,4412
 returnn/frontend/state.py,sha256=EePdrx6PtWL4mJ2XZmGlh5dl4nq6G9wZpqP4hdDEzfY,2935
 returnn/frontend/stepwise_scheduler.py,sha256=fMOTR7npGCDXrXDmSQ4VwmudoHEbY3Yr-QGyjFdQJSc,927
 returnn/frontend/tensor_array.py,sha256=Ej7CHtvpY0yBROlAk5vFe3CTXh-iAuqu9qcXS3Qxt2I,4328
@@ -233,7 +233,7 @@ returnn/torch/util/gradient_checkpoint.py,sha256=iLy-FB65DC8O6LxzmMvFjnSdpIVpko8
 returnn/torch/util/module.py,sha256=MXHIrF9Isu575DDJIa81212ULKwdqu1oOLxDVZecVSk,1693
 returnn/torch/util/scaled_gradient.py,sha256=3585VuNypBty-pW6r3BKK047H3MqZQSdMjXeYAb4cmU,3192
 returnn/util/__init__.py,sha256=UIG1qw4idqhW71BV60ha7h9PktxvEVcBIu0lYRossK8,336
-returnn/util/basic.py,sha256=__rtDp8crZfm0mEeAKsRxNCdWuBHh9OeOm8UO-X4CJU,142380
+returnn/util/basic.py,sha256=Iynt9ATEs_8DaZsX5z6weMyaO2xW9o3gaywq6X7mbEc,142380
 returnn/util/better_exchook.py,sha256=MVMnuu6KoyqgvlMeQLQNTfdspcPR9MwigCXOpeTVqCI,62956
 returnn/util/bpe.py,sha256=LWFhICZsEOnMwNws0lybPNzKRX6rSr8yKCvP65vjl9Y,19656
 returnn/util/debug.py,sha256=wuRzdg9zB84WWCGyTjmRR_zYypu8gXxlc0nZ6si9OC8,28224
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
 returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
 returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
 returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
-returnn-1.20250226.115259.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
-returnn-1.20250226.115259.dist-info/METADATA,sha256=2Ws--V5aicc3WJ-I6OrqPbbrvVNTH3Cnno6L7yeIyKY,5215
-returnn-1.20250226.115259.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
-returnn-1.20250226.115259.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
-returnn-1.20250226.115259.dist-info/RECORD,,
+returnn-1.20250226.132109.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+returnn-1.20250226.132109.dist-info/METADATA,sha256=1tdi71kdx2tM5iCiEhtBkyM0jO74aiySG7bvbniZgz0,5215
+returnn-1.20250226.132109.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
+returnn-1.20250226.132109.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+returnn-1.20250226.132109.dist-info/RECORD,,