returnn 1.20250701.131223__py3-none-any.whl → 1.20250703.183400__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Note: this release of returnn is flagged as potentially problematic.

returnn/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: returnn
- Version: 1.20250701.131223
+ Version: 1.20250703.183400
  Summary: The RWTH extensible training framework for universal recurrent neural networks
  Home-page: https://github.com/rwth-i6/returnn/
  Author: Albert Zeyer
@@ -1,2 +1,2 @@
- version = '1.20250701.131223'
- long_version = '1.20250701.131223+git.7346de8'
+ version = '1.20250703.183400'
+ long_version = '1.20250703.183400+git.a5c35a2'
@@ -220,7 +220,8 @@ class Backend(Generic[T]):
  """
  :param a:
  :param kind: "add", "sub", "mul", "truediv", "floordiv", "mod", "pow",
-     "maximum", "minimum", "logical_and", "logical_or", "squared_difference"
+     "maximum", "minimum", "logical_and", "logical_or", "squared_difference",
+     "logaddexp"
  :param b:
  :return: a `kind` b
  """
@@ -316,6 +316,7 @@ bool PyModuleState::_cachedOpInitTorch() {
  AddOp(TOp_Maximum, "clamp_min");
  AddOp(TOp_Minimum, "clamp_max");
  AddOpAlt(TOp_SquaredDifference, "squared_difference");
+ AddOp(TOp_LogAddExp, "logaddexp");
  AddOp(TOp_And, "logical_and");
  AddOp(TOp_Or, "logical_or");
  AddOp(TOp_Neg, "neg");
@@ -356,6 +357,7 @@ const char* rawOpName(RawOp op) {
  names[TOp_Maximum] = "maximum";
  names[TOp_Minimum] = "minimum";
  names[TOp_SquaredDifference] = "squared_difference";
+ names[TOp_LogAddExp] = "logaddexp";
  names[TOp_And] = "logical_and";
  names[TOp_Or] = "logical_or";
  // The names for the unary funcs matter:
@@ -34,6 +34,7 @@ enum RawOp {
  TOp_Maximum,
  TOp_Minimum,
  TOp_SquaredDifference,
+ TOp_LogAddExp,

  TOp_And,
  TOp_Or,
@@ -1468,6 +1468,7 @@ static PyObject* _pyTensorCompareOrCombine(PyObject *self, PyObject *args, PyObj
  kindToCombineFunc["logical_and"] = TOp_And;
  kindToCombineFunc["logical_or"] = TOp_Or;
  kindToCombineFunc["squared_difference"] = TOp_SquaredDifference;
+ kindToCombineFunc["logaddexp"] = TOp_LogAddExp;
  }

  auto it = isCompare ? kindToCompareFunc.find(kind) : kindToCombineFunc.find(kind);

returnn/frontend/math_.py CHANGED
@@ -37,6 +37,7 @@ __all__ = [
      "logical_not",
      "opt_logical_or",
      "opt_logical_and",
+     "log_add_exp",
      "is_finite",
      "is_infinite",
      "is_neg_infinite",
@@ -173,7 +174,8 @@ def combine(
  """
  :param a:
  :param kind: "add"|"+", "sub"|"-", "mul"|"*", "truediv"|"/", "floordiv"|"//", "mod"|"%", "pow"|"**",
-     "max"|"maximum", "min"|"minimum", "logical_and", "logical_or", "squared_difference"
+     "max"|"maximum", "min"|"minimum", "logical_and", "logical_or", "squared_difference",
+     "logaddexp"
  :param b:
  :param allow_broadcast_all_sources: if True, it is allowed that neither a nor b has all dims of the result.
      Not needed when out_dims is specified explicitly.
@@ -364,6 +366,16 @@ def opt_logical_and(a: Union[Tensor, bool], b: Union[Tensor, bool]) -> Union[Ten
      return combine(a, "logical_and", b)


+ def log_add_exp(a: Tensor, b: Tensor) -> Tensor:
+     """
+     Computes log(exp(a) + exp(b)) in a numerically stable way.
+     This is useful for log probabilities, e.g. in beam search.
+
+     See also: func:`reduce_logsumexp`.
+     """
+     return combine(a, "logaddexp", b)
+
+
  def is_finite(a: Tensor) -> Tensor:
      """is finite"""
      # noinspection PyProtectedMember
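
The new `log_add_exp` simply forwards to the `"logaddexp"` combine kind that this release wires through the frontend, the native module, and the backends. As a reminder of what that op computes, here is a minimal pure-Python sketch (plain floats, not RETURNN tensors) of the numerically stable formulation described in the docstring:

import math

def log_add_exp_scalar(a: float, b: float) -> float:
    """Numerically stable log(exp(a) + exp(b)) on plain Python floats."""
    m = max(a, b)
    if m == float("-inf"):  # both inputs are -inf: log(0 + 0) stays -inf
        return float("-inf")
    # Factoring out the maximum keeps the exp() arguments <= 0, so nothing
    # overflows even for large-magnitude log-probabilities.
    return m + math.log(math.exp(a - m) + math.exp(b - m))

print(log_add_exp_scalar(1000.0, 1000.0))                  # ~1000.693; the naive form overflows
print(log_add_exp_scalar(math.log(0.25), math.log(0.75)))  # ~0.0, i.e. log(0.25 + 0.75)

Reducing with `reduce_logsumexp` over an axis is the n-ary analogue of combining two tensors this way, which is why the two docstrings now cross-reference each other.
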
@@ -99,6 +99,8 @@ def reduce_logsumexp(source: Tensor[T], *, axis: Union[Dim, Sequence[Dim]], use_
  """
  Reduce the tensor along the given axis

+ Also see :func:`log_add_exp`.
+
  :param source:
  :param axis:
  :param use_mask: if True (default), use the time mask (part of dim tag) to ignore padding frames
@@ -247,7 +249,7 @@ class RunningMean(rf.Module):
      """

      def _update_running_stats():
-         assert all(d in self.shape for d in x.dims)
+         assert all(d in x.dims for d in self.shape)
          x_ = rf.reduce_mean(x, axis=[d for d in x.dims if d not in self.shape])
          self.mean.assign_add(self.alpha * (x_ - self.mean))

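The flipped assert changes the direction of the containment check: the old form required every dim of the input `x` to be part of `self.shape`, which rejects inputs carrying extra dims (e.g. batch/time) that the following `reduce_mean` is meant to average away; the new form only requires that every dim tracked by the statistics is present in `x`. A small sketch with hypothetical dim names standing in for RETURNN `Dim` objects:

x_dims = {"batch", "time", "feature"}  # dims of the incoming tensor x
stat_shape = {"feature"}               # dims kept by the running statistics

old_check = all(d in stat_shape for d in x_dims)  # False: batch/time are not in stat_shape
new_check = all(d in x_dims for d in stat_shape)  # True: every tracked dim is present in x
print(old_check, new_check)  # False True
# The dims failing the old check ("batch", "time") are exactly the ones that
# reduce_mean averages over in the next line of the diff.
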
@@ -9607,6 +9607,7 @@ class CombineLayer(LayerBase):
      `maximum`, `minimum`,
      `logical_and`, `logical_or`,
      `squared_difference`,
+     `logaddexp`,
      or `eval`,
      or any function in the tf.math or tf namespace.
  :param list[LayerBase] sources:
@@ -9814,6 +9815,10 @@ class CombineLayer(LayerBase):
          assert kind == "eval" and eval_str
          return self._op_kind_eval(sources, eval_str=eval_str, eval_locals=eval_locals)

+     if hasattr(self, "_op_kind_%s" % kind):
+         func = getattr(self, "_op_kind_%s" % kind)
+         return func(sources)
+
      kind = {
          "+": "add",
          "-": "subtract",
@@ -9823,16 +9828,18 @@ class CombineLayer(LayerBase):
          "sub": "subtract",
          "mul": "multiply",
      }.get(kind, kind)
+
      if hasattr(tf, "math") and hasattr(tf.math, kind):
          tf_func = getattr(tf.math, kind)
      elif hasattr(tf, kind):
          tf_func = getattr(tf, kind)
+     elif hasattr(tf, "keras") and hasattr(tf.keras, "ops") and hasattr(tf.keras.ops, kind):
+         tf_func = getattr(tf.keras.ops, kind)
+     elif hasattr(tf, "experimental") and hasattr(tf.experimental, "numpy") and hasattr(tf.experimental.numpy, kind):
+         tf_func = getattr(tf.experimental.numpy, kind)
      else:
-         tf_func = None
-     if tf_func:
-         return self._op_dense_fn(sources, tf_func, self.output)
-
-     return getattr(self, "_op_kind_%s" % kind)(sources)
+         raise ValueError(f"{self}: unknown kind {kind!r}")
+     return self._op_dense_fn(sources, tf_func, self.output)


  class EvalLayer(CombineLayer):
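
The rewritten dispatch first checks for a layer-specific `_op_kind_<kind>` method and only then resolves `kind` to a TensorFlow function, now also searching `tf.keras.ops` and `tf.experimental.numpy` before giving up; an unknown kind raises a `ValueError` up front instead of falling through to a late `AttributeError`. A rough, illustrative sketch of the resulting lookup order (not the layer's actual code):

def resolve_tf_func(tf, kind: str):
    """Return the first attribute named `kind` found in the searched TF namespaces."""
    namespaces = [
        getattr(tf, "math", None),                                  # tf.math.<kind>
        tf,                                                         # tf.<kind>
        getattr(getattr(tf, "keras", None), "ops", None),           # tf.keras.ops.<kind>
        getattr(getattr(tf, "experimental", None), "numpy", None),  # tf.experimental.numpy.<kind>
    ]
    for ns in namespaces:
        if ns is not None and hasattr(ns, kind):
            return getattr(ns, kind)
    raise ValueError(f"unknown kind {kind!r}")

The extra fallbacks matter for the new `logaddexp` kind: `tf.math` has no function of that name, so it is presumably picked up from one of the added namespaces (e.g. `tf.experimental.numpy.logaddexp`).
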
@@ -10657,7 +10664,7 @@ class SearchSortedLayer(LayerBase):
      transposed_values_data = values_data.copy_transpose(perm=values_batch_axes + values_non_batch_axes)  # [B,F]
      x = transposed_sorted_data.placeholder  # [B,T]
      if transposed_sorted_data.dims[-1].need_masking():
-         from returnn.tf.util.basic import where_bc, sequence_mask
+         from returnn.tf.util.basic import where_bc

          seq_mask = transposed_sorted_data.get_sequence_mask_broadcast(axis=-1)
          x = where_bc(seq_mask, x, x.dtype.max)  # note: this is not correct if values contains x.dtype.max
@@ -840,7 +840,8 @@ class TorchBackend(Backend[torch.Tensor]):
  """
  :param a:
  :param kind: "add", "sub", "mul", "truediv", "floordiv", "mod", "pow",
-     "maximum", "minimum", "logical_and", "logical_or", "squared_difference"
+     "maximum", "minimum", "logical_and", "logical_or", "squared_difference",
+     "logaddexp"
  :param b:
  :return: a `kind` b
  """
@@ -9,7 +9,7 @@ See https://github.com/rwth-i6/returnn/issues/1519 for initial discussion.
  Main class is :class:`FileCache`.
  """

- from typing import Any, Collection, Iterable, List, Optional, Tuple, Union
+ from typing import Any, Collection, Dict, Iterable, List, Optional, Tuple, Union
  import errno
  import os
  import pathlib
@@ -20,7 +20,7 @@ from dataclasses import dataclass
  from collections import defaultdict
  from contextlib import contextmanager
  import json
- from threading import Thread, Event
+ from threading import Lock, Thread, Event
  from returnn.config import Config, get_global_config
  from .basic import expand_env_vars, LockFile, human_bytes_size

@@ -527,6 +527,8 @@ class _TouchFilesThread(Thread):
      super().__init__(daemon=True)
      self.stop = Event()
      self.files = defaultdict(int)  # usage counter
+     self.files_lock = Lock()  # lock for self.files/self.locks
+     self.locks: Dict[str, Lock] = {}  # filename -> lock
      self.interval = interval
      self.cache_base_dir = cache_base_dir
      self._is_started = False  # careful: `_started` is already a member of the base class
@@ -534,16 +536,17 @@ class _TouchFilesThread(Thread):
  def run(self):
      """thread main loop"""
      while True:
-         all_files = {}  # dict to have order deterministic
-         for filename in self.files.copy():  # copy dict under GIL to avoid modifications during iteration
-             all_files[filename] = True
-             all_files.update({k: True for k in _all_parent_dirs(filename, base_dir=self.cache_base_dir)})
-         for filename in all_files:
-             try:
-                 os.utime(filename, None)
-             except Exception as exc:
-                 print(f"FileCache: failed updating mtime of {filename}: {exc}")
-                 raise
+         # locks dict copied under GIL
+         locks = self.locks.copy()
+         for filename, lock in locks.items():
+             with lock:
+                 if filename not in self.files:
+                     continue
+                 try:
+                     os.utime(filename, None)
+                 except Exception as exc:
+                     print(f"FileCache: failed updating mtime of {filename}: {exc}")
+                     raise
          if self.stop.wait(self.interval):
              return

@@ -560,18 +563,26 @@ class _TouchFilesThread(Thread):
          to_add = [to_add]
      assert isinstance(to_add, Iterable)
      self.start_once()
-     for file in to_add:
-         self.files[file] += 1
+     # we track the parent directories as well and give them their own locks to be
+     # able to synchronize their deletion with the touch thread
+     files_to_iter = _files_with_parents(to_add, base_dir=self.cache_base_dir)
+     with self.files_lock:  # to avoid that we end up with duplicate locks
+         for file, count in files_to_iter.items():
+             self.files[file] += count
+             if file not in self.locks:
+                 self.locks[file] = Lock()

  def files_remove(self, to_remove: Union[str, Iterable[str]]):
      """remove"""
      if isinstance(to_remove, str):
          to_remove = [to_remove]
      assert isinstance(to_remove, Iterable)
-     for filename in to_remove:
-         self.files[filename] -= 1
-         if self.files[filename] <= 0:
-             del self.files[filename]
+     for file, count in _files_with_parents(to_remove, base_dir=self.cache_base_dir).items():
+         with self.locks[file], self.files_lock:
+             self.files[file] -= count
+             if self.files[file] <= 0:
+                 del self.files[file]
+                 del self.locks[file]

  @contextmanager
  def files_added_context(self, files: Collection[str]):
@@ -592,3 +603,12 @@ def _all_parent_dirs(filename: str, *, base_dir: str) -> List[str]:
              break
          dirs.append(filename)
      return dirs
+
+
+ def _files_with_parents(filenames: Iterable[str], *, base_dir: str) -> Dict[str, int]:
+     res = defaultdict(int)  # dict to have order deterministic
+     for fn in filenames:
+         res[fn] += 1
+         for fn_ in _all_parent_dirs(fn, base_dir=base_dir):
+             res[fn_] += 1
+     return res
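
Taken together, the file_cache.py changes replace the flat usage counter with per-path reference counts (now including parent directories, via the new `_files_with_parents`) and give every tracked path its own `Lock`: the touch thread holds a path's lock around `os.utime`, and `files_remove` takes that same lock before dropping the last reference, so a cache entry can no longer be deleted while it is being touched. A stripped-down sketch of the pattern with hypothetical names (not the actual `_TouchFilesThread` API):

import os
from collections import defaultdict
from threading import Lock


class TouchTracker:
    """Reference-counted set of paths, each guarded by its own lock."""

    def __init__(self):
        self.files = defaultdict(int)  # path -> reference count
        self.locks = {}                # path -> Lock
        self.files_lock = Lock()       # guards the two dicts above

    def add(self, path: str):
        with self.files_lock:  # avoid creating duplicate locks for the same path
            self.files[path] += 1
            if path not in self.locks:
                self.locks[path] = Lock()

    def remove(self, path: str):
        # Take the per-path lock first, so a concurrent touch either finishes before
        # the path is dropped or sees it gone and skips it.
        with self.locks[path], self.files_lock:
            self.files[path] -= 1
            if self.files[path] <= 0:
                del self.files[path]
                del self.locks[path]

    def touch_all(self):
        # Runs periodically in the touch thread; the dict copy is done under the GIL.
        for path, lock in self.locks.copy().items():
            with lock:
                if path not in self.files:
                    continue  # removed between copying and acquiring the lock
                os.utime(path, None)  # refresh mtime so cache eviction keeps the entry
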
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: returnn
- Version: 1.20250701.131223
+ Version: 1.20250703.183400
  Summary: The RWTH extensible training framework for universal recurrent neural networks
  Home-page: https://github.com/rwth-i6/returnn/
  Author: Albert Zeyer
@@ -1,9 +1,9 @@
- returnn/PKG-INFO,sha256=HhZ81gqzWBl944H0m1ciKxs-C22Zrwl8-M-y26f3K3g,5215
+ returnn/PKG-INFO,sha256=6b050edAnN_IixiTU7cKY9Y7GsV2u54Go4pMebtP1LA,5215
  returnn/__init__.py,sha256=biBtRsM0WZ406vShaeH-9WFoqJ8XwTbn6g0EeFJ7l8E,1012
  returnn/__main__.py,sha256=lHyZcu_0yc9f7Vf_Kfdy9PmeU0T76XVXnpalHi5WKro,31740
  returnn/__old_mod_loader__.py,sha256=nvsNY-xELdS_IPNkv66Q9Rmvg4dbGW0-EBRDcCmctos,7654
  returnn/__setup__.py,sha256=22kQn2fh11iPM0hLb2Fy5sLmoU1JGvmDxXRYuRgQkwU,4659
- returnn/_setup_info_generated.py,sha256=_rH7q_mBZ72AY8bymhQmw0zMby7RJjzhyKawprtoaGU,77
+ returnn/_setup_info_generated.py,sha256=W3RqBPspEAo2psz8RlzIp3A0pBAmerlhlAfLeZuEUy8,77
  returnn/config.py,sha256=3tmKhB6FnQZaNdtcYsiB61JnEY--iZ2qmJ4yq0b6tE0,29140
  returnn/forward_iface.py,sha256=A_OJiaXsX4MlXQRzST86ylyxSUZbC402PQL1REcqHjM,911
  returnn/learning_rate_control.py,sha256=ZvWryAn_tv9DhV8sh1LV3eE34Yltl3On3mYZAG4hR9s,34684
@@ -75,7 +75,7 @@ returnn/extern/graph_editor/subgraph.py,sha256=q9o0zVBLDrTIidaXg5WG5daDW0mLbwv2J
  returnn/extern/graph_editor/transform.py,sha256=qMGSenpbAnGqdG6QP6iWjlm6_ccySYJaZKOoAj1dbOM,29348
  returnn/extern/graph_editor/util.py,sha256=HfRbyQPmQ6_n5-O-096n0KeJtllQXFtaurpeJS_URZ0,18706
  returnn/frontend/__init__.py,sha256=2aS7nbxXniIrBp2DODl0xN0f3IJ_dX4Bi9ZlR7W5_DE,1472
- returnn/frontend/_backend.py,sha256=w-Xtn-2-uu0AGL0VAjP5QFuh_tpqX2FiSyLkPvZ5bPY,50446
+ returnn/frontend/_backend.py,sha256=pAnVAbZhIGKD-10tp0Mx7AO1GZNghYu7AVAPhiimN-k,50471
  returnn/frontend/_cache.py,sha256=JAhi7L-raQ3A-NC3JUYDtdRTwT3BGJJGGZxrZ8MfEWQ,8403
  returnn/frontend/_numpy_backend.py,sha256=fZjks7p3dgxVZ6tSDazTTgBxNjJqXjfqgw_7mA7rDEE,9066
  returnn/frontend/_random_journal.py,sha256=_ktP_mjgx8vtQQGX_DofdhewJj0aPiczefTWeemPkmo,5457
@@ -101,7 +101,7 @@ returnn/frontend/label_smoothing.py,sha256=lxmaowNr61sCMzMewqHhu1r0CcklYfhLXlFnB
  returnn/frontend/linear.py,sha256=xRUjnkD3MTWDezSaYATBYJQ2fa1RhKMNrTuhC54hhVs,2252
  returnn/frontend/loop.py,sha256=t-z6ke1X03I2aPUEqLYmVZWyMzfW3IedFvKUGc-TCX8,16160
  returnn/frontend/loss.py,sha256=uSvou2MPd13JiLAg_OIQ3AyyLvD3RHjMEVgFEN0gKqU,7440
- returnn/frontend/math_.py,sha256=KlJxdIib8ENlid7cc4lcwHv5e21tzTjTEV8VgEDAijo,16984
+ returnn/frontend/math_.py,sha256=A_RkZ5lH2uXMchfPIH3itraWtMNNCVckQHHpf7aIIZQ,17295
  returnn/frontend/matmul.py,sha256=xkueyxzSDz8MsYaWxPSjmV2Yy-tcaiOQDXbFt1IQM2A,1944
  returnn/frontend/module.py,sha256=219rh5mE0CD0-NdxXLsKyhv3BNtOI9jSyiI1Rb8MOyU,10700
  returnn/frontend/nested.py,sha256=P84u_cjoYdYRJ_0Cbt0vlKXxskmXTDfsnw_vFCCNKtU,15107
@@ -112,7 +112,7 @@ returnn/frontend/parametrize.py,sha256=VhgTEP7ehON950Q4bkCy8rvg9641moEKAXn0XzomK
  returnn/frontend/piecewise_linear.py,sha256=TdL6wzop8P1dcIZwkEbJFvSUZSI1cbhS3XKzlWQkEVI,1964
  returnn/frontend/rand.py,sha256=Levgf5VtOOBKDSgz0869Jf3VW4BWxYZuRXsa_fOxNI4,12969
  returnn/frontend/rec.py,sha256=6YSsSG7fdtfvvg24vmexSg8R2aVCcKHBdGLh-Mgn9Co,8037
- returnn/frontend/reduce.py,sha256=xvxN_h3LsMJdmT0IbW4nOf8qFhckuAniIhD9PalO6j0,10305
+ returnn/frontend/reduce.py,sha256=gRSvBJZNHa757IqBxGw4hu5eiO3pjie_ptEwUXHLSCs,10340
  returnn/frontend/run_ctx.py,sha256=yyOMUCKTOe19C4z2Nfly4YCLBmQ9ihip6nGrkW-Y6qg,23789
  returnn/frontend/signal.py,sha256=hfDipDhO0n9nXhGy7txwYUNbvg28NqkFq9p0Jq46f9c,4411
  returnn/frontend/state.py,sha256=EePdrx6PtWL4mJ2XZmGlh5dl4nq6G9wZpqP4hdDEzfY,2935
@@ -122,10 +122,10 @@ returnn/frontend/types.py,sha256=r-QsxPQyFSr9WwCRzqTn_X5jQLbjthrtjHavY8XIDmk,109
  returnn/frontend/_native/__init__.py,sha256=fVjazAujt0rdICXZL-GgW1sjFeL1HB4NPuy2m5rmMsc,6480
  returnn/frontend/_native/backend.cpp,sha256=MeHczHypwj_ncntOxRqanK8SqGyV9Eq1X0cpMWb_WII,4768
  returnn/frontend/_native/backend.hpp,sha256=Wq80dcEzXfRNxGOXFnIgHllkiv1rDi3KpHK-xxJsSDI,791
- returnn/frontend/_native/module.cpp,sha256=lS1Oypo3n6oCu6cxKAmqpNjSvQN9aMZIOeMec96FWYU,15626
- returnn/frontend/_native/module.hpp,sha256=uf4HPSTrFP2brGR_x9G5N1ZlZ-ok5GakMbNo4LbqxUg,6670
+ returnn/frontend/_native/module.cpp,sha256=9BCUoDTZDJ6hlXp4pUus1BlN7-oxcRy6tK9ctyCkwk0,15709
+ returnn/frontend/_native/module.hpp,sha256=iv4jvQidLaE8uC-YbaYjiXONTL_Pq7WUQKQ5MdFpdIs,6689
  returnn/frontend/_native/py_utils.hpp,sha256=vcxKGmOyDRuwsmmSEjoaCJyKMy1BNYoGlso2pZu7VoE,3139
- returnn/frontend/_native/tensor_ops.cpp,sha256=bYtwwn_NeJfAEHWYPEJlkoLDKt9baZ3RA8av7gtz2qc,70246
+ returnn/frontend/_native/tensor_ops.cpp,sha256=bA4Gf-q8cVENL441r1IYVd44EcUsV-eELyDzqmnCuw0,70302
  returnn/frontend/_native/tensor_ops.hpp,sha256=dDqvUejRNHjItnmOP5aHyAQbAmXmXoDVXSe3tveEU8A,3732
  returnn/frontend/audio/__init__.py,sha256=8mahwucBje8qHKw0bOvoySlvvD0rFKxviSvcAHSjiJY,67
  returnn/frontend/audio/mel.py,sha256=LNzC9aWWgLqua34bwxA--M9shtLlePfwLQ-HpvP2o54,7884
@@ -193,7 +193,7 @@ returnn/tf/frontend_low_level/__init__.py,sha256=34469k3KzMUIGowxReOZnbf6WdTjxY7
  returnn/tf/frontend_low_level/_backend.py,sha256=JwwRRIGnElqBC4bTImdB7w3U1u_SJESeZHYLmq86wog,24479
  returnn/tf/layers/__init__.py,sha256=Ngu-X84nWFgz7ndDu88DqoZ-5lUMMTQWH4g7N8pSoCg,72
  returnn/tf/layers/base.py,sha256=sUxEfh6WxaHWHG7O3cfxB6gG6YpEHkFKUJVayKvTBSI,152968
- returnn/tf/layers/basic.py,sha256=KOmEKBz5idk_-zL1XlKWmL4AIbgpwbHkGKZrszYrlM4,614963
+ returnn/tf/layers/basic.py,sha256=zHDPLP97jSvYYZcMPqQVOVxFk6I1BfXd71XVfs0VIkQ,615386
  returnn/tf/layers/rec.py,sha256=3f6M_5aAMPvx7aAHdPV3VSFRHf7tjpp8lrXSzmk1I5c,548435
  returnn/tf/layers/segmental_model.py,sha256=wUyDZGr-eTVIIQWcsHLML0wtOxuWn_NFKOIrUKQcvoI,21515
  returnn/tf/layers/signal_processing.py,sha256=vRlkN7k7otk9_Qdv0qr_l6V0VT5Q6dO2MxwZWb2HH2M,52693
@@ -216,7 +216,7 @@ returnn/torch/data/queued_data_iter.py,sha256=PoOsGHdHVZjTmcyfq_ZOw--P6hyfTdmAWI
  returnn/torch/data/returnn_dataset_wrapper.py,sha256=2CaDapzrlqahANuq-nyVAtv5ENHuM8A7okORwYJDisg,8006
  returnn/torch/data/tensor_utils.py,sha256=-Teqi--LLbt6q_5mDRdoHZHmPgSdC83W706ukif_YiU,1284
  returnn/torch/frontend/__init__.py,sha256=AA48HZnC17ASuKA0EWy8loZ-Bib_yUtqF4T1wYvjst4,62
- returnn/torch/frontend/_backend.py,sha256=L0SOhpm6c-9wbAUJIAgf9Qo8HVIyka2aHyAIJVt6W-A,101850
+ returnn/torch/frontend/_backend.py,sha256=a9qcpUJrSDtH7KR6ZIpB4sijm6ztRlZ4myAe2P0dtaE,101875
  returnn/torch/frontend/_rand.py,sha256=1JgIkV2XmpgJD86zXZ-NCAe-QuoP2swr6NaS1oz3Qa8,1830
  returnn/torch/frontend/bridge.py,sha256=c_mVBCBo29sjm8Bhxarv00szwGPgxjwoIqAHOmceGQw,7842
  returnn/torch/frontend/raw_ops.py,sha256=lF0h-KtYYsdaaqQADylVZp9qzPskOOXA4MfmYDyx5IU,296
@@ -238,7 +238,7 @@ returnn/util/better_exchook.py,sha256=39yvRecluDgYhViwSkaQ8crJ_cBWI63KeEGuK4RKe5
  returnn/util/bpe.py,sha256=LWFhICZsEOnMwNws0lybPNzKRX6rSr8yKCvP65vjl9Y,19656
  returnn/util/debug.py,sha256=wuRzdg9zB84WWCGyTjmRR_zYypu8gXxlc0nZ6si9OC8,28224
  returnn/util/debug_helpers.py,sha256=0EINLK4uLtoSt5_kHs1M2NIFpMd0S7i4c4rx90U4fJk,2914
- returnn/util/file_cache.py,sha256=wMZvzFGfdy_Kz5cdJCvp8KYX5lw2-N9lIHIuB0ic2qA,26646
+ returnn/util/file_cache.py,sha256=15p0daE4KmzO5OvkhDr9cA_zJNbJrX98mnL-2fECtmM,27575
  returnn/util/fsa.py,sha256=k2lJ8tyf_g44Xk1EPVLwDwpP4spoMTqIigDVOWocQHY,59177
  returnn/util/literal_py_to_pickle.py,sha256=3dnjWPeeiDT2xp4bRDgIf9yddx7b1AG7mOKEn_jiSl8,2173
  returnn/util/lru_cache.py,sha256=7Q5H3a8b07E8e1iB7PA9jCpRnxMJZOFS2KO07cy0gqk,11446
@@ -253,8 +253,8 @@ returnn/util/sig_proc.py,sha256=Tjz0VOAVyqu2qDCF5HZ1JjALjcFsHcNkcd96WgZeKfE,7265
  returnn/util/task_system.py,sha256=y4sMVXQ25Qd2z0rx03uOlXlkE-jbCYC1Sjfn-XlraVU,26003
  returnn/util/train_proc_manager.py,sha256=Pjht28k6uz6BNQ47uW6Gf880iyq5q4wx7P_K2tmoAM8,3266
  returnn/util/watch_memory.py,sha256=BR5P2kvBN6UI81cE0_1WAA6Hd1SByLbBaiDxvLhPOew,4213
- returnn-1.20250701.131223.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
- returnn-1.20250701.131223.dist-info/METADATA,sha256=HhZ81gqzWBl944H0m1ciKxs-C22Zrwl8-M-y26f3K3g,5215
- returnn-1.20250701.131223.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
- returnn-1.20250701.131223.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
- returnn-1.20250701.131223.dist-info/RECORD,,
+ returnn-1.20250703.183400.dist-info/LICENSE,sha256=ywBD_U2aD4vpuoIgNAsjIGBYydl0tVKll3De0Z8s77c,11041
+ returnn-1.20250703.183400.dist-info/METADATA,sha256=6b050edAnN_IixiTU7cKY9Y7GsV2u54Go4pMebtP1LA,5215
+ returnn-1.20250703.183400.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+ returnn-1.20250703.183400.dist-info/top_level.txt,sha256=Lsn4WZc5Pbfk0-xDQOgnFCxOoqxL4CyeM3N1TFbJncw,8
+ returnn-1.20250703.183400.dist-info/RECORD,,