returnn 1.20250701.140328__py3-none-any.whl → 1.20250704.120801__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of returnn might be problematic. Click here for more details.

returnn/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: returnn
3
- Version: 1.20250701.140328
3
+ Version: 1.20250704.120801
4
4
  Summary: The RWTH extensible training framework for universal recurrent neural networks
5
5
  Home-page: https://github.com/rwth-i6/returnn/
6
6
  Author: Albert Zeyer
@@ -1,2 +1,2 @@
1
- version = '1.20250701.140328'
2
- long_version = '1.20250701.140328+git.6646903'
1
+ version = '1.20250704.120801'
2
+ long_version = '1.20250704.120801+git.b693591'
returnn/datasets/meta.py CHANGED
@@ -1990,9 +1990,13 @@ class VariableDataset(Dataset):
1990
1990
  class MultiEpochDataset(CachedDataset2):
1991
1991
  """
1992
1992
  It wraps some dataset, where one outer epoch corresponds to multiple epochs in the inner wrapped dataset.
1993
+ I.e. one iteration through this dataset corresponds to multiple iterations through the inner dataset.
1993
1994
 
1994
- This can be useful when the inner dataset uses partition_epoch, and we want to cover the whole full epoch.
1995
+ This can be useful for forwarding, when you want to do multiple iterations through the dataset.
1996
+ This could be useful for clustering.
1995
1997
 
1998
+ This can also be useful when the inner dataset uses (or must use) partition_epoch,
1999
+ and we want to cover the whole full epoch:
1996
2000
  One specific example when the data is distributed over multiple files,
1997
2001
  and for reasonable performance, you want to have the data copied to the local disk,
1998
2002
  but all data together is too large to fit on the local disk.
@@ -2041,7 +2045,11 @@ class MultiEpochDataset(CachedDataset2):
2041
2045
  return self._dataset.get_all_tags()
2042
2046
 
2043
2047
  def get_total_num_seqs(self, *, fast: bool = False) -> int:
2044
- """total num seqs"""
2048
+ """
2049
+ Total num seqs.
2050
+ Note that this is the total number of seqs in the inner dataset,
2051
+ so without the multi-epoch handling.
2052
+ """
2045
2053
  return self._dataset.get_total_num_seqs(fast=fast)
2046
2054
 
2047
2055
  def get_data_keys(self) -> List[str]:
@@ -220,7 +220,8 @@ class Backend(Generic[T]):
220
220
  """
221
221
  :param a:
222
222
  :param kind: "add", "sub", "mul", "truediv", "floordiv", "mod", "pow",
223
- "maximum", "minimum", "logical_and", "logical_or", "squared_difference"
223
+ "maximum", "minimum", "logical_and", "logical_or", "squared_difference",
224
+ "logaddexp"
224
225
  :param b:
225
226
  :return: a `kind` b
226
227
  """
@@ -316,6 +316,7 @@ bool PyModuleState::_cachedOpInitTorch() {
316
316
  AddOp(TOp_Maximum, "clamp_min");
317
317
  AddOp(TOp_Minimum, "clamp_max");
318
318
  AddOpAlt(TOp_SquaredDifference, "squared_difference");
319
+ AddOp(TOp_LogAddExp, "logaddexp");
319
320
  AddOp(TOp_And, "logical_and");
320
321
  AddOp(TOp_Or, "logical_or");
321
322
  AddOp(TOp_Neg, "neg");
@@ -356,6 +357,7 @@ const char* rawOpName(RawOp op) {
356
357
  names[TOp_Maximum] = "maximum";
357
358
  names[TOp_Minimum] = "minimum";
358
359
  names[TOp_SquaredDifference] = "squared_difference";
360
+ names[TOp_LogAddExp] = "logaddexp";
359
361
  names[TOp_And] = "logical_and";
360
362
  names[TOp_Or] = "logical_or";
361
363
  // The names for the unary funcs matter:
@@ -34,6 +34,7 @@ enum RawOp {
34
34
  TOp_Maximum,
35
35
  TOp_Minimum,
36
36
  TOp_SquaredDifference,
37
+ TOp_LogAddExp,
37
38
 
38
39
  TOp_And,
39
40
  TOp_Or,
@@ -1468,6 +1468,7 @@ static PyObject* _pyTensorCompareOrCombine(PyObject *self, PyObject *args, PyObj
1468
1468
  kindToCombineFunc["logical_and"] = TOp_And;
1469
1469
  kindToCombineFunc["logical_or"] = TOp_Or;
1470
1470
  kindToCombineFunc["squared_difference"] = TOp_SquaredDifference;
1471
+ kindToCombineFunc["logaddexp"] = TOp_LogAddExp;
1471
1472
  }
1472
1473
 
1473
1474
  auto it = isCompare ? kindToCompareFunc.find(kind) : kindToCombineFunc.find(kind);
returnn/frontend/math_.py CHANGED
@@ -37,6 +37,7 @@ __all__ = [
37
37
  "logical_not",
38
38
  "opt_logical_or",
39
39
  "opt_logical_and",
40
+ "log_add_exp",
40
41
  "is_finite",
41
42
  "is_infinite",
42
43
  "is_neg_infinite",
@@ -173,7 +174,8 @@ def combine(
173
174
  """
174
175
  :param a:
175
176
  :param kind: "add"|"+", "sub"|"-", "mul"|"*", "truediv"|"/", "floordiv"|"//", "mod"|"%", "pow"|"**",
176
- "max"|"maximum", "min"|"minimum", "logical_and", "logical_or", "squared_difference"
177
+ "max"|"maximum", "min"|"minimum", "logical_and", "logical_or", "squared_difference",
178
+ "logaddexp"
177
179
  :param b:
178
180
  :param allow_broadcast_all_sources: if True, it is allowed that neither a nor b has all dims of the result.
179
181
  Not needed when out_dims is specified explicitly.
@@ -364,6 +366,16 @@ def opt_logical_and(a: Union[Tensor, bool], b: Union[Tensor, bool]) -> Union[Ten
364
366
  return combine(a, "logical_and", b)
365
367
 
366
368
 
369
+ def log_add_exp(a: Tensor, b: Tensor) -> Tensor:
370
+ """
371
+ Computes log(exp(a) + exp(b)) in a numerically stable way.
372
+ This is useful for log probabilities, e.g. in beam search.
373
+
374
+ See also: :func:`reduce_logsumexp`.
375
+ """
376
+ return combine(a, "logaddexp", b)
377
+
378
+
367
379
  def is_finite(a: Tensor) -> Tensor:
368
380
  """is finite"""
369
381
  # noinspection PyProtectedMember
@@ -99,6 +99,8 @@ def reduce_logsumexp(source: Tensor[T], *, axis: Union[Dim, Sequence[Dim]], use_
99
99
  """
100
100
  Reduce the tensor along the given axis
101
101
 
102
+ Also see :func:`log_add_exp`.
103
+
102
104
  :param source:
103
105
  :param axis:
104
106
  :param use_mask: if True (default), use the time mask (part of dim tag) to ignore padding frames
@@ -247,7 +249,7 @@ class RunningMean(rf.Module):
247
249
  """
248
250
 
249
251
  def _update_running_stats():
250
- assert all(d in self.shape for d in x.dims)
252
+ assert all(d in x.dims for d in self.shape)
251
253
  x_ = rf.reduce_mean(x, axis=[d for d in x.dims if d not in self.shape])
252
254
  self.mean.assign_add(self.alpha * (x_ - self.mean))
253
255
 
@@ -9607,6 +9607,7 @@ class CombineLayer(LayerBase):
9607
9607
  `maximum`, `minimum`,
9608
9608
  `logical_and`, `logical_or`,
9609
9609
  `squared_difference`,
9610
+ `logaddexp`,
9610
9611
  or `eval`,
9611
9612
  or any function in the tf.math or tf namespace.
9612
9613
  :param list[LayerBase] sources:
@@ -9814,6 +9815,10 @@ class CombineLayer(LayerBase):
9814
9815
  assert kind == "eval" and eval_str
9815
9816
  return self._op_kind_eval(sources, eval_str=eval_str, eval_locals=eval_locals)
9816
9817
 
9818
+ if hasattr(self, "_op_kind_%s" % kind):
9819
+ func = getattr(self, "_op_kind_%s" % kind)
9820
+ return func(sources)
9821
+
9817
9822
  kind = {
9818
9823
  "+": "add",
9819
9824
  "-": "subtract",
@@ -9823,16 +9828,18 @@ class CombineLayer(LayerBase):
9823
9828
  "sub": "subtract",
9824
9829
  "mul": "multiply",
9825
9830
  }.get(kind, kind)
9831
+
9826
9832
  if hasattr(tf, "math") and hasattr(tf.math, kind):
9827
9833
  tf_func = getattr(tf.math, kind)
9828
9834
  elif hasattr(tf, kind):
9829
9835
  tf_func = getattr(tf, kind)
9836
+ elif hasattr(tf, "keras") and hasattr(tf.keras, "ops") and hasattr(tf.keras.ops, kind):
9837
+ tf_func = getattr(tf.keras.ops, kind)
9838
+ elif hasattr(tf, "experimental") and hasattr(tf.experimental, "numpy") and hasattr(tf.experimental.numpy, kind):
9839
+ tf_func = getattr(tf.experimental.numpy, kind)
9830
9840
  else:
9831
- tf_func = None
9832
- if tf_func:
9833
- return self._op_dense_fn(sources, tf_func, self.output)
9834
-
9835
- return getattr(self, "_op_kind_%s" % kind)(sources)
9841
+ raise ValueError(f"{self}: unknown kind {kind!r}")
9842
+ return self._op_dense_fn(sources, tf_func, self.output)
9836
9843
 
9837
9844
 
9838
9845
  class EvalLayer(CombineLayer):
@@ -10657,7 +10664,7 @@ class SearchSortedLayer(LayerBase):
10657
10664
  transposed_values_data = values_data.copy_transpose(perm=values_batch_axes + values_non_batch_axes) # [B,F]
10658
10665
  x = transposed_sorted_data.placeholder # [B,T]
10659
10666
  if transposed_sorted_data.dims[-1].need_masking():
10660
- from returnn.tf.util.basic import where_bc, sequence_mask
10667
+ from returnn.tf.util.basic import where_bc
10661
10668
 
10662
10669
  seq_mask = transposed_sorted_data.get_sequence_mask_broadcast(axis=-1)
10663
10670
  x = where_bc(seq_mask, x, x.dtype.max) # note: this is not correct if values contains x.dtype.max
@@ -840,7 +840,8 @@ class TorchBackend(Backend[torch.Tensor]):
840
840
  """
841
841
  :param a:
842
842
  :param kind: "add", "sub", "mul", "truediv", "floordiv", "mod", "pow",
843
- "maximum", "minimum", "logical_and", "logical_or", "squared_difference"
843
+ "maximum", "minimum", "logical_and", "logical_or", "squared_difference",
844
+ "logaddexp"
844
845
  :param b:
845
846
  :return: a `kind` b
846
847
  """
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: returnn
3
- Version: 1.20250701.140328
3
+ Version: 1.20250704.120801
4
4
  Summary: The RWTH extensible training framework for universal recurrent neural networks
5
5
  Home-page: https://github.com/rwth-i6/returnn/
6
6
  Author: Albert Zeyer
@@ -1,9 +1,9 @@
1
- returnn/PKG-INFO,sha256=RByOzAa8sFb4gdGsaWiRUEOlZq3X9qqn4MEF0ziNPNc,5215
1
+ returnn/PKG-INFO,sha256=V7Fti4odxCkXq9_mO0Fn_AGvs5VVJWqu9W9UtYebguM,5215
2
2
  returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
3
3
  returnn/__main__.py,sha256=lHyZcu_0yc9f7Vf_Kfdy9PmeU0T76XVXnpalHi5WKro,31740
4
4
  returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
5
5
  returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
6
- returnn/_setup_info_generated.py,sha256=FycWE5HycEgzbwEzgqhdpFdUBY_4Kg7Rlg43mZCjiBs,77
6
+ returnn/_setup_info_generated.py,sha256=Xh34434TSfRAFKxnLyywQQ3o1aXs2TrSEL_FezJ6b60,77
7
7
  returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
8
8
  returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
9
9
  returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -22,7 +22,7 @@ returnn/datasets/generating.py,sha256=9U_w6URIrv-Rb-hDbPOzYW9qYXzJbw32N6G268IKyo
22
22
  returnn/datasets/hdf.py,sha256=v5sjBenURR9Z-g7AQ9tsL84yDSye5RtbLpym3M6HSDE,67833
23
23
  returnn/datasets/lm.py,sha256=IqUsOzbdSWUynL0YFL25HbtMR4AxaQGHvjjqRE9IwBo,99215
24
24
  returnn/datasets/map.py,sha256=kOBJVZmwDhLsOplzDNByIfa0NRSUaMo2Lsy36lBvxrM,10907
25
- returnn/datasets/meta.py,sha256=KQtidTgSh-1gNgbpJ8OhXt6v2lkhPPH5dpjfzwsr3E4,95251
25
+ returnn/datasets/meta.py,sha256=6XPPxhiNSxWw9Hu5Z6wG8dD9Zk82FqiI-k9HGQSTKgw,95658
26
26
  returnn/datasets/multi_proc.py,sha256=aVjsLt2qjHnHOrEYCgIPCwNYE-f1fiGP6eZ8NGAr3A4,22583
27
27
  returnn/datasets/normalization_data.py,sha256=J3njQCMvWAbIAVPepO2L_Xdau9eWYB7Zyd6STeGzTbc,14615
28
28
  returnn/datasets/numpy_dump.py,sha256=wl8bKIKAlff2HPJPtuu5wBg3TLOf16d2wLVB4lLAwTM,5158
@@ -75,7 +75,7 @@ returnn/extern/graph_editor/subgraph.py,sha256=q9o0zVBLDrTIidaXg5WG5daDW0mLbwv2J
75
75
  returnn/extern/graph_editor/transform.py,sha256=qMGSenpbAnGqdG6QP6iWjlm6_ccySYJaZKOoAj1dbOM,29348
76
76
  returnn/extern/graph_editor/util.py,sha256=HfRbyQPmQ6_n5-O-096n0KeJtllQXFtaurpeJS_URZ0,18706
77
77
  returnn/frontend/__init__.py,sha256=2aS7nbxXniIrBp2DODl0xN0f3IJ_dX4Bi9ZlR7W5_DE,1472
78
- returnn/frontend/_backend.py,sha256=w-Xtn-2-uu0AGL0VAjP5QFuh_tpqX2FiSyLkPvZ5bPY,50446
78
+ returnn/frontend/_backend.py,sha256=pAnVAbZhIGKD-10tp0Mx7AO1GZNghYu7AVAPhiimN-k,50471
79
79
  returnn/frontend/_cache.py,sha256=JAhi7L-raQ3A-NC3JUYDtdRTwT3BGJJGGZxrZ8MfEWQ,8403
80
80
  returnn/frontend/_numpy_backend.py,sha256=fZjks7p3dgxVZ6tSDazTTgBxNjJqXjfqgw_7mA7rDEE,9066
81
81
  returnn/frontend/_random_journal.py,sha256=_ktP_mjgx8vtQQGX_DofdhewJj0aPiczefTWeemPkmo,5457
@@ -101,7 +101,7 @@ returnn/frontend/label_smoothing.py,sha256=lxmaowNr61sCMzMewqHhu1r0CcklYfhLXlFnB
101
101
  returnn/frontend/linear.py,sha256=xRUjnkD3MTWDezSaYATBYJQ2fa1RhKMNrTuhC54hhVs,2252
102
102
  returnn/frontend/loop.py,sha256=t-z6ke1X03I2aPUEqLYmVZWyMzfW3IedFvKUGc-TCX8,16160
103
103
  returnn/frontend/loss.py,sha256=uSvou2MPd13JiLAg_OIQ3AyyLvD3RHjMEVgFEN0gKqU,7440
104
- returnn/frontend/math_.py,sha256=KlJxdIib8ENlid7cc4lcwHv5e21tzTjTEV8VgEDAijo,16984
104
+ returnn/frontend/math_.py,sha256=A_RkZ5lH2uXMchfPIH3itraWtMNNCVckQHHpf7aIIZQ,17295
105
105
  returnn/frontend/matmul.py,sha256=xkueyxzSDz8MsYaWxPSjmV2Yy-tcaiOQDXbFt1IQM2A,1944
106
106
  returnn/frontend/module.py,sha256=219rh5mE0CD0-NdxXLsKyhv3BNtOI9jSyiI1Rb8MOyU,10700
107
107
  returnn/frontend/nested.py,sha256=P84u_cjoYdYRJ_0Cbt0vlKXxskmXTDfsnw_vFCCNKtU,15107
@@ -112,7 +112,7 @@ returnn/frontend/parametrize.py,sha256=VhgTEP7ehON950Q4bkCy8rvg9641moEKAXn0XzomK
112
112
  returnn/frontend/piecewise_linear.py,sha256=TdL6wzop8P1dcIZwkEbJFvSUZSI1cbhS3XKzlWQkEVI,1964
113
113
  returnn/frontend/rand.py,sha256=Levgf5VtOOBKDSgz0869Jf3VW4BWxYZuRXsa_fOxNI4,12969
114
114
  returnn/frontend/rec.py,sha256=6YSsSG7fdtfvvg24vmexSg8R2aVCcKHBdGLh-Mgn9Co,8037
115
- returnn/frontend/reduce.py,sha256=xvxN_h3LsMJdmT0IbW4nOf8qFhckuAniIhD9PalO6j0,10305
115
+ returnn/frontend/reduce.py,sha256=gRSvBJZNHa757IqBxGw4hu5eiO3pjie_ptEwUXHLSCs,10340
116
116
  returnn/frontend/run_ctx.py,sha256=yyOMUCKTOe19C4z2Nfly4YCLBmQ9ihip6nGrkW-Y6qg,23789
117
117
  returnn/frontend/signal.py,sha256=hfDipDhO0n9nXhGy7txwYUNbvg28NqkFq9p0Jq46f9c,4411
118
118
  returnn/frontend/state.py,sha256=EePdrx6PtWL4mJ2XZmGlh5dl4nq6G9wZpqP4hdDEzfY,2935
@@ -122,10 +122,10 @@ returnn/frontend/types.py,sha256=r-QsxPQyFSr9WwCRzqTn_X5jQLbjthrtjHavY8XIDmk,109
122
122
  returnn/frontend/_native/__init__.py,sha256=fVjazAujt0rdICXZL-GgW1sjFeL1HB4NPuy2m5rmMsc,6480
123
123
  returnn/frontend/_native/backend.cpp,sha256=MeHczHypwj_ncntOxRqanK8SqGyV9Eq1X0cpMWb_WII,4768
124
124
  returnn/frontend/_native/backend.hpp,sha256=Wq80dcEzXfRNxGOXFnIgHllkiv1rDi3KpHK-xxJsSDI,791
125
- returnn/frontend/_native/module.cpp,sha256=lS1Oypo3n6oCu6cxKAmqpNjSvQN9aMZIOeMec96FWYU,15626
126
- returnn/frontend/_native/module.hpp,sha256=uf4HPSTrFP2brGR_x9G5N1ZlZ-ok5GakMbNo4LbqxUg,6670
125
+ returnn/frontend/_native/module.cpp,sha256=9BCUoDTZDJ6hlXp4pUus1BlN7-oxcRy6tK9ctyCkwk0,15709
126
+ returnn/frontend/_native/module.hpp,sha256=iv4jvQidLaE8uC-YbaYjiXONTL_Pq7WUQKQ5MdFpdIs,6689
127
127
  returnn/frontend/_native/py_utils.hpp,sha256=vcxKGmOyDRuwsmmSEjoaCJyKMy1BNYoGlso2pZu7VoE,3139
128
- returnn/frontend/_native/tensor_ops.cpp,sha256=bYtwwn_NeJfAEHWYPEJlkoLDKt9baZ3RA8av7gtz2qc,70246
128
+ returnn/frontend/_native/tensor_ops.cpp,sha256=bA4Gf-q8cVENL441r1IYVd44EcUsV-eELyDzqmnCuw0,70302
129
129
  returnn/frontend/_native/tensor_ops.hpp,sha256=dDqvUejRNHjItnmOP5aHyAQbAmXmXoDVXSe3tveEU8A,3732
130
130
  returnn/frontend/audio/__init__.py,sha256=8mahwucBje8qHKw0bOvoySlvvD0rFKxviSvcAHSjiJY,67
131
131
  returnn/frontend/audio/mel.py,sha256=LNzC9aWWgLqua34bwxA--M9shtLlePfwLQ-HpvP2o54,7884
@@ -193,7 +193,7 @@ returnn/tf/frontend_low_level/__init__.py,sha256=34469k3KzMUIGowxReOZnbf6WdTjxY7
193
193
  returnn/tf/frontend_low_level/_backend.py,sha256=JwwRRIGnElqBC4bTImdB7w3U1u_SJESeZHYLmq86wog,24479
194
194
  returnn/tf/layers/__init__.py,sha256=Ngu-X84nWFgz7ndDu88DqoZ-5lUMMTQWH4g7N8pSoCg,72
195
195
  returnn/tf/layers/base.py,sha256=sUxEfh6WxaHWHG7O3cfxB6gG6YpEHkFKUJVayKvTBSI,152968
196
- returnn/tf/layers/basic.py,sha256=KOmEKBz5idk_-zL1XlKWmL4AIbgpwbHkGKZrszYrlM4,614963
196
+ returnn/tf/layers/basic.py,sha256=zHDPLP97jSvYYZcMPqQVOVxFk6I1BfXd71XVfs0VIkQ,615386
197
197
  returnn/tf/layers/rec.py,sha256=3f6M_5aAMPvx7aAHdPV3VSFRHf7tjpp8lrXSzmk1I5c,548435
198
198
  returnn/tf/layers/segmental_model.py,sha256=wUyDZGr-eTVIIQWcsHLML0wtOxuWn_NFKOIrUKQcvoI,21515
199
199
  returnn/tf/layers/signal_processing.py,sha256=vRlkN7k7otk9_Qdv0qr_l6V0VT5Q6dO2MxwZWb2HH2M,52693
@@ -216,7 +216,7 @@ returnn/torch/data/queued_data_iter.py,sha256=PoOsGHdHVZjTmcyfq_ZOw--P6hyfTdmAWI
216
216
  returnn/torch/data/returnn_dataset_wrapper.py,sha256=2CaDapzrlqahANuq-nyVAtv5ENHuM8A7okORwYJDisg,8006
217
217
  returnn/torch/data/tensor_utils.py,sha256=-Teqi--LLbt6q_5mDRdoHZHmPgSdC83W706ukif_YiU,1284
218
218
  returnn/torch/frontend/__init__.py,sha256=AA48HZnC17ASuKA0EWy8loZ-Bib_yUtqF4T1wYvjst4,62
219
- returnn/torch/frontend/_backend.py,sha256=L0SOhpm6c-9wbAUJIAgf9Qo8HVIyka2aHyAIJVt6W-A,101850
219
+ returnn/torch/frontend/_backend.py,sha256=a9qcpUJrSDtH7KR6ZIpB4sijm6ztRlZ4myAe2P0dtaE,101875
220
220
  returnn/torch/frontend/_rand.py,sha256=1JgIkV2XmpgJD86zXZ-NCAe-QuoP2swr6NaS1oz3Qa8,1830
221
221
  returnn/torch/frontend/bridge.py,sha256=c_mVBCBo29sjm8Bhxarv00szwGPgxjwoIqAHOmceGQw,7842
222
222
  returnn/torch/frontend/raw_ops.py,sha256=lF0h-KtYYsdaaqQADylVZp9qzPskOOXA4MfmYDyx5IU,296
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
253
253
  returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
254
254
  returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
255
255
  returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
256
- returnn-1.20250701.140328.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
257
- returnn-1.20250701.140328.dist-info/METADATA,sha256=RByOzAa8sFb4gdGsaWiRUEOlZq3X9qqn4MEF0ziNPNc,5215
258
- returnn-1.20250701.140328.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
259
- returnn-1.20250701.140328.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
260
- returnn-1.20250701.140328.dist-info/RECORD,,
256
+ returnn-1.20250704.120801.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
257
+ returnn-1.20250704.120801.dist-info/METADATA,sha256=V7Fti4odxCkXq9_mO0Fn_AGvs5VVJWqu9W9UtYebguM,5215
258
+ returnn-1.20250704.120801.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
259
+ returnn-1.20250704.120801.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
260
+ returnn-1.20250704.120801.dist-info/RECORD,,