returnn 1.20240628.104058__tar.gz → 1.20240702.142741__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of returnn might be problematic. Click here for more details.

Files changed (449)
  1. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/PKG-INFO +1 -1
  2. returnn-1.20240702.142741/_setup_info_generated.py +2 -0
  3. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/generating.py +1 -1
  4. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/util/feature_extraction.py +1 -1
  5. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/sprint/control.py +12 -11
  6. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/sprint/interface.py +4 -1
  7. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/data_pipeline.py +2 -2
  8. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/hyper_param_tuning.py +1 -1
  9. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/layers/rec.py +42 -30
  10. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/layers/signal_processing.py +1 -1
  11. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/native_op.py +4 -4
  12. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/updater.py +15 -4
  13. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/util/basic.py +11 -10
  14. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/util/data.py +2 -1
  15. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/data/pipeline.py +11 -1
  16. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/basic.py +3 -3
  17. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/better_exchook.py +1 -1
  18. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/debug.py +5 -5
  19. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn.egg-info/PKG-INFO +1 -1
  20. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_torch_engine.py +68 -64
  21. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/tf_inspect_checkpoint.py +2 -1
  22. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/torch_inspect_checkpoint.py +1 -1
  23. returnn-1.20240628.104058/_setup_info_generated.py +0 -2
  24. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/.editorconfig +0 -0
  25. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/.gitignore +0 -0
  26. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/.gitmodules +0 -0
  27. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/.kateconfig +0 -0
  28. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/CHANGELOG.md +0 -0
  29. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/CODEOWNERS +0 -0
  30. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/CONTRIBUTING.md +0 -0
  31. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/LICENSE +0 -0
  32. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/MANIFEST.in +0 -0
  33. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/README.rst +0 -0
  34. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/__init__.py +0 -0
  35. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/12AX.cluster_map +0 -0
  36. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/_setup_returnn_env.py +0 -0
  37. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-fwd.config +0 -0
  38. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-horovod-mpi.py +0 -0
  39. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-horovod-mpi.py.sh +0 -0
  40. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-horovod-mpi.sh +0 -0
  41. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-hyper-param-tuning.config +0 -0
  42. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-iter-dataset.py +0 -0
  43. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-list-devices.py +0 -0
  44. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-lua-torch-layer.config +0 -0
  45. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-pretrain.config +0 -0
  46. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-record-and-push-to-webserver.py +0 -0
  47. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-returnn-as-framework.py +0 -0
  48. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-rf-pt-benchmark.py +0 -0
  49. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-rf.config +0 -0
  50. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-rhn-enwik8.config +0 -0
  51. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-sprint-interface.py +0 -0
  52. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-att-copy.config +0 -0
  53. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-attention.config +0 -0
  54. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-chunking-blstm.12ax.config +0 -0
  55. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-contribrnn-lstm.12ax.config +0 -0
  56. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-enc-dec.config +0 -0
  57. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-hard-att-copy.config +0 -0
  58. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-lstm-benchmark.py +0 -0
  59. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-maxgradnorm-lstm.12ax.config +0 -0
  60. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-native-lstm-lowmem.12ax.config +0 -0
  61. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-native-lstm.12ax.config +0 -0
  62. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-native-lstm2.12ax.config +0 -0
  63. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-native-lstm2.12ax.tuned.config +0 -0
  64. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-neural-transducer.12ax.config +0 -0
  65. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-rec-explicit-lstm.config +0 -0
  66. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-rec-explicit-rnn.config +0 -0
  67. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-rec-self-att.config +0 -0
  68. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-search-compiled-graph.py +0 -0
  69. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-tf-vanilla-lstm.12ax.config +0 -0
  70. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-timit-lstm-ctc.config +0 -0
  71. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-torch.config +0 -0
  72. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo-upd-mult-model.lstm.12ax.config +0 -0
  73. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/demo.sh +0 -0
  74. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/IAM_lines/a01-000u-00.png +0 -0
  75. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/IAM_lines/a01-007-04.png +0 -0
  76. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/IAM_lines/a01-007-06.png +0 -0
  77. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/README.txt +0 -0
  78. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/chars.txt +0 -0
  79. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/config_demo +0 -0
  80. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/config_fwd +0 -0
  81. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/config_real +0 -0
  82. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/create_IAM_dataset.py +0 -0
  83. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/decode.py +0 -0
  84. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/features/raw/demo.h5 +0 -0
  85. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/go.sh +0 -0
  86. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/lines.txt +0 -0
  87. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/split/eval.txt +0 -0
  88. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/split/train.txt +0 -0
  89. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/IAM/split/valid.txt +0 -0
  90. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/README.md +0 -0
  91. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/artificial/create_test_h5.py +0 -0
  92. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/artificial/forwardconfig +0 -0
  93. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/artificial/go.sh +0 -0
  94. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/artificial/trainconfig +0 -0
  95. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/artificial_rgb/create_test_h5.py +0 -0
  96. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/artificial_rgb/forwardconfig +0 -0
  97. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/artificial_rgb/go.sh +0 -0
  98. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/demos/mdlstm/artificial_rgb/trainconfig +0 -0
  99. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/pyproject.toml +0 -0
  100. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/requirements.txt +0 -0
  101. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/__init__.py +0 -0
  102. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/__main__.py +0 -0
  103. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/__old_mod_loader__.py +0 -0
  104. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/__setup__.py +0 -0
  105. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/config.py +0 -0
  106. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/__init__.py +0 -0
  107. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/audio.py +0 -0
  108. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/basic.py +0 -0
  109. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/bundle_file.py +0 -0
  110. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/cached.py +0 -0
  111. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/cached2.py +0 -0
  112. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/distrib_files.py +0 -0
  113. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/hdf.py +0 -0
  114. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/lm.py +0 -0
  115. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/map.py +0 -0
  116. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/meta.py +0 -0
  117. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/multi_proc.py +0 -0
  118. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/normalization_data.py +0 -0
  119. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/numpy_dump.py +0 -0
  120. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/raw_wav.py +0 -0
  121. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/sprint.py +0 -0
  122. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/stereo.py +0 -0
  123. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/util/__init__.py +0 -0
  124. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/util/strings.py +0 -0
  125. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/datasets/util/vocabulary.py +0 -0
  126. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/engine/__init__.py +0 -0
  127. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/engine/base.py +0 -0
  128. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/engine/batch.py +0 -0
  129. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/__init__.py +0 -0
  130. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/__main__.py +0 -0
  131. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/.git +0 -0
  132. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/.gitignore +0 -0
  133. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/LICENSE +0 -0
  134. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/README.md +0 -0
  135. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/aligner.gif +0 -0
  136. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/check.png +0 -0
  137. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/core.cu +0 -0
  138. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/core.h +0 -0
  139. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/core_cpu.cpp +0 -0
  140. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/LICENSE +0 -0
  141. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/MANIFEST.in +0 -0
  142. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/README.md +0 -0
  143. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/binding.cpp +0 -0
  144. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.cu +0 -0
  145. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.h +0 -0
  146. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/requirements.txt +0 -0
  147. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/setup.py +0 -0
  148. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/__init__.py +0 -0
  149. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/test.py +0 -0
  150. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/ref_rna.py +0 -0
  151. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/setup.py +0 -0
  152. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op.cc +0 -0
  153. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op_kernel_tmpl.h +0 -0
  154. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/warp_rna/__init__.py +0 -0
  155. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/WarpRna/warp-rna/test.cpp +0 -0
  156. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/__init__.py +0 -0
  157. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/graph_editor/README.md +0 -0
  158. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/graph_editor/__init__.py +0 -0
  159. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/graph_editor/edit.py +0 -0
  160. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/graph_editor/reroute.py +0 -0
  161. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/graph_editor/select.py +0 -0
  162. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/graph_editor/subgraph.py +0 -0
  163. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/graph_editor/transform.py +0 -0
  164. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/extern/graph_editor/util.py +0 -0
  165. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/forward_iface.py +0 -0
  166. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/__init__.py +0 -0
  167. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_backend.py +0 -0
  168. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_native/__init__.py +0 -0
  169. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_native/backend.cpp +0 -0
  170. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_native/backend.hpp +0 -0
  171. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_native/module.cpp +0 -0
  172. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_native/module.hpp +0 -0
  173. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_native/py_utils.hpp +0 -0
  174. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_native/tensor_ops.cpp +0 -0
  175. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_native/tensor_ops.hpp +0 -0
  176. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_numpy_backend.py +0 -0
  177. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_random_journal.py +0 -0
  178. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/_utils.py +0 -0
  179. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/array_.py +0 -0
  180. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/attention.py +0 -0
  181. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/audio/__init__.py +0 -0
  182. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/audio/mel.py +0 -0
  183. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/audio/specaugment.py +0 -0
  184. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/backend.py +0 -0
  185. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/build_from_dict.py +0 -0
  186. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/cond.py +0 -0
  187. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/const.py +0 -0
  188. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/container.py +0 -0
  189. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/control_flow_ctx.py +0 -0
  190. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/conv.py +0 -0
  191. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/decoder/__init__.py +0 -0
  192. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/decoder/transformer.py +0 -0
  193. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/device.py +0 -0
  194. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/dims.py +0 -0
  195. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/dropout.py +0 -0
  196. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/dtype.py +0 -0
  197. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/encoder/__init__.py +0 -0
  198. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/encoder/base.py +0 -0
  199. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/encoder/conformer.py +0 -0
  200. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/gradient.py +0 -0
  201. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/graph.py +0 -0
  202. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/hooks.py +0 -0
  203. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/init.py +0 -0
  204. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/label_smoothing.py +0 -0
  205. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/linear.py +0 -0
  206. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/loop.py +0 -0
  207. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/loss.py +0 -0
  208. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/math_.py +0 -0
  209. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/matmul.py +0 -0
  210. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/module.py +0 -0
  211. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/normalization.py +0 -0
  212. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/parameter.py +0 -0
  213. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/piecewise_linear.py +0 -0
  214. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/rand.py +0 -0
  215. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/rec.py +0 -0
  216. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/reduce.py +0 -0
  217. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/run_ctx.py +0 -0
  218. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/signal.py +0 -0
  219. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/state.py +0 -0
  220. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/stepwise_scheduler.py +0 -0
  221. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/tensor_array.py +0 -0
  222. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/frontend/types.py +0 -0
  223. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/import_/__init__.py +0 -0
  224. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/import_/common.py +0 -0
  225. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/import_/git.py +0 -0
  226. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/import_/import_.py +0 -0
  227. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/learning_rate_control.py +0 -0
  228. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/log.py +0 -0
  229. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/native_op.cpp +0 -0
  230. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/native_op.py +0 -0
  231. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/pretrain.py +0 -0
  232. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/sprint/__init__.py +0 -0
  233. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/sprint/cache.py +0 -0
  234. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/sprint/error_signals.py +0 -0
  235. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/sprint/extern_interface.py +0 -0
  236. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/README.md +0 -0
  237. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/__init__.py +0 -0
  238. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/_dim_extra.py +0 -0
  239. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/_tensor_extra.py +0 -0
  240. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/_tensor_mixin_base.py +0 -0
  241. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/_tensor_op_overloads.py +0 -0
  242. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/control_flow_ctx.py +0 -0
  243. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/dim.py +0 -0
  244. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/marked_dim.py +0 -0
  245. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/tensor.py +0 -0
  246. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/tensor_dict.py +0 -0
  247. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tensor/utils.py +0 -0
  248. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/__init__.py +0 -0
  249. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/compat.py +0 -0
  250. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/distributed.py +0 -0
  251. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/engine.py +0 -0
  252. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/README.md +0 -0
  253. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/__init__.py +0 -0
  254. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/_backend.py +0 -0
  255. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/_utils.py +0 -0
  256. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/cond.py +0 -0
  257. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/config_entry_points.py +0 -0
  258. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/debug_eager_mode.py +0 -0
  259. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/dims.py +0 -0
  260. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/layer.py +0 -0
  261. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/loop.py +0 -0
  262. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/make_layer.py +0 -0
  263. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/masked_computation.py +0 -0
  264. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/parameter_assign.py +0 -0
  265. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_layers/prev_tensor_ref.py +0 -0
  266. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_low_level/__init__.py +0 -0
  267. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/frontend_low_level/_backend.py +0 -0
  268. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/horovod.py +0 -0
  269. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/layers/__init__.py +0 -0
  270. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/layers/base.py +0 -0
  271. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/layers/basic.py +0 -0
  272. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/layers/segmental_model.py +0 -0
  273. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/layers/variable.py +0 -0
  274. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/network.py +0 -0
  275. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/sprint.py +0 -0
  276. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/util/__init__.py +0 -0
  277. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/util/gradient_checkpoint.py +0 -0
  278. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/util/ken_lm.py +0 -0
  279. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/tf/util/open_fst.py +0 -0
  280. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/README.md +0 -0
  281. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/__init__.py +0 -0
  282. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/data/__init__.py +0 -0
  283. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/data/extern_data.py +0 -0
  284. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/data/queued_data_iter.py +0 -0
  285. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/data/returnn_dataset_wrapper.py +0 -0
  286. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/data/tensor_utils.py +0 -0
  287. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/distributed.py +0 -0
  288. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/engine.py +0 -0
  289. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/frontend/__init__.py +0 -0
  290. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/frontend/_backend.py +0 -0
  291. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/frontend/_rand.py +0 -0
  292. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/frontend/bridge.py +0 -0
  293. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/frontend/raw_ops.py +0 -0
  294. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/updater.py +0 -0
  295. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/util/README.md +0 -0
  296. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/util/__init__.py +0 -0
  297. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/util/diagnose_gpu.py +0 -0
  298. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/torch/util/scaled_gradient.py +0 -0
  299. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/__init__.py +0 -0
  300. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/bpe.py +0 -0
  301. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/debug_helpers.py +0 -0
  302. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/file_cache.py +0 -0
  303. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/fsa.py +0 -0
  304. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/literal_py_to_pickle.py +0 -0
  305. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/math.py +0 -0
  306. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/multi_proc_non_daemonic_spawn.py +0 -0
  307. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/native_code_compiler.py +0 -0
  308. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/pprint.py +0 -0
  309. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/py-to-pickle.cpp +0 -0
  310. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/py_compat.py +0 -0
  311. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/py_ext_mod_compiler.py +0 -0
  312. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/result_with_reason.py +0 -0
  313. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/sig_proc.py +0 -0
  314. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/task_system.py +0 -0
  315. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/train_proc_manager.py +0 -0
  316. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn/util/watch_memory.py +0 -0
  317. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn.egg-info/SOURCES.txt +0 -0
  318. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn.egg-info/dependency_links.txt +0 -0
  319. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/returnn.egg-info/top_level.txt +0 -0
  320. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/rnn.py +0 -0
  321. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/setup.cfg +0 -0
  322. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/setup.py +0 -0
  323. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/DummySprintExec.py +0 -0
  324. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm-inspection-profile.xml +0 -0
  325. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/.gitignore +0 -0
  326. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/.name +0 -0
  327. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/codeStyleSettings.xml +0 -0
  328. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/codeStyles/Project.xml +0 -0
  329. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/codeStyles/codeStyleConfig.xml +0 -0
  330. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/inspectionProfiles/Project_Default.xml +0 -0
  331. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/inspectionProfiles/profiles_settings.xml +0 -0
  332. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/misc.xml +0 -0
  333. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/modules.xml +0 -0
  334. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/returnn.iml +0 -0
  335. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/PyCharm.idea/scopes/scope_settings.xml +0 -0
  336. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/_set_num_threads1.py +0 -0
  337. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/_setup_returnn_env.py +0 -0
  338. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/_setup_test_env.py +0 -0
  339. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/bpe-unicode-demo.codes +0 -0
  340. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/bpe-unicode-demo.vocab +0 -0
  341. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/lexicon_opt.fst +0 -0
  342. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/lexicon_opt.isyms +0 -0
  343. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/lexicon_opt.jpg +0 -0
  344. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/lexicon_opt.osyms +0 -0
  345. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/lint_common.py +0 -0
  346. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/pycharm-inspect.py +0 -0
  347. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/pylint.py +0 -0
  348. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/returnn-as-framework.py +0 -0
  349. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/rf_utils.py +0 -0
  350. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/spelling.dic +0 -0
  351. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_Config.py +0 -0
  352. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_Dataset.py +0 -0
  353. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_Fsa.py +0 -0
  354. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_GeneratingDataset.py +0 -0
  355. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_HDFDataset.py +0 -0
  356. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_LearningRateControl.py +0 -0
  357. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_Log.py +0 -0
  358. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_MultiProcDataset.py +0 -0
  359. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_Pretrain.py +0 -0
  360. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_ResNet.py +0 -0
  361. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_SprintDataset.py +0 -0
  362. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_SprintInterface.py +0 -0
  363. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TFEngine.py +0 -0
  364. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TFNativeOp.py +0 -0
  365. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TFNetworkLayer.py +0 -0
  366. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TFNetworkRecLayer.py +0 -0
  367. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TFNetworkSigProcLayer.py +0 -0
  368. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TFUpdater.py +0 -0
  369. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TFUtil.py +0 -0
  370. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TF_determinism.py +0 -0
  371. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TaskSystem.py +0 -0
  372. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TaskSystem_SharedMem.py +0 -0
  373. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_TranslationDataset.py +0 -0
  374. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_Util.py +0 -0
  375. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_demos.py +0 -0
  376. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_fork_exec.py +0 -0
  377. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_hdf_dump.py +0 -0
  378. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_array.py +0 -0
  379. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_attention.py +0 -0
  380. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_base.py +0 -0
  381. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_cond.py +0 -0
  382. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_const.py +0 -0
  383. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_container.py +0 -0
  384. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_conv.py +0 -0
  385. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_encoder_conformer.py +0 -0
  386. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_gradient.py +0 -0
  387. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_label_smoothing.py +0 -0
  388. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_loop.py +0 -0
  389. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_math.py +0 -0
  390. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_normalization.py +0 -0
  391. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_piecewise_linear.py +0 -0
  392. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_rec.py +0 -0
  393. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_reduce.py +0 -0
  394. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_rf_signal.py +0 -0
  395. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_tensor.py +0 -0
  396. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_tools.py +0 -0
  397. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_torch_dataset.py +0 -0
  398. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_torch_frontend.py +0 -0
  399. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tests/test_torch_internal_frontend.py +0 -0
  400. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/_setup_returnn_env.py +0 -0
  401. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/analyze-dataset-batches.py +0 -0
  402. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/bliss-collect-seq-lens.py +0 -0
  403. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/bliss-dump-text.py +0 -0
  404. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/bliss-get-segment-names.py +0 -0
  405. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/bliss-to-ogg-zip.py +0 -0
  406. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/bpe-create-lexicon.py +0 -0
  407. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/calculate-word-error-rate.py +0 -0
  408. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/cleanup-old-models.py +0 -0
  409. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/collect-orth-symbols.py +0 -0
  410. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/collect-words.py +0 -0
  411. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/compile_native_op.py +0 -0
  412. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/compile_tf_graph.py +0 -0
  413. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/debug-dump-search-scores.py +0 -0
  414. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/debug-plot-search-scores.py +0 -0
  415. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/dump-dataset-raw-strings.py +0 -0
  416. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/dump-dataset.py +0 -0
  417. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/dump-forward-stats.py +0 -0
  418. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/dump-forward.py +0 -0
  419. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/dump-network-json.py +0 -0
  420. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/dump-pickle.py +0 -0
  421. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/extract_state_tying_from_dataset.py +0 -0
  422. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/get-attention-weights.py +0 -0
  423. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/get-best-model-epoch.py +0 -0
  424. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/hdf_dump.py +0 -0
  425. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/hdf_dump_translation_dataset.py +0 -0
  426. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/import-blocks-mt-model.py +0 -0
  427. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/import-t2t-mt-model.py +0 -0
  428. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/.gitignore +0 -0
  429. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/Makefile +0 -0
  430. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/README.md +0 -0
  431. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/example/README.md +0 -0
  432. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/example/libs_list +0 -0
  433. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.config +0 -0
  434. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.keep_over_epoch.lstm2.config +0 -0
  435. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/example/rescore_lattice.sh +0 -0
  436. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/example/state_vars_list +0 -0
  437. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/example/tensor_names_list +0 -0
  438. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/file.h +0 -0
  439. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/htklatticerescorer.cc +0 -0
  440. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/htklatticerescorer.h +0 -0
  441. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/main.cc +0 -0
  442. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/rescorer.h +0 -0
  443. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/vocabulary.cc +0 -0
  444. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/lattice_rescorer/vocabulary.h +0 -0
  445. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/tf_avg_checkpoints.py +0 -0
  446. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/tf_inspect_summary_log.py +0 -0
  447. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/torch_avg_checkpoints.py +0 -0
  448. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/torch_export_to_onnx.py +0 -0
  449. {returnn-1.20240628.104058 → returnn-1.20240702.142741}/tools/torch_inspect_checkpoint_and_opt.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: returnn
3
- Version: 1.20240628.104058
3
+ Version: 1.20240702.142741
4
4
  Summary: The RWTH extensible training framework for universal recurrent neural networks
5
5
  Home-page: https://github.com/rwth-i6/returnn/
6
6
  Author: Albert Zeyer
@@ -0,0 +1,2 @@
1
+ version = '1.20240702.142741'
2
+ long_version = '1.20240702.142741+git.bd0a83f'
@@ -2543,7 +2543,7 @@ class Enwik8Corpus(CachedDataset2):
2543
2543
  num_test_chars = 5000000
2544
2544
 
2545
2545
  raw_data = zipfile.ZipFile(self._zip_filename).read("enwik8").decode("utf8")
2546
- raw_data = numpy.fromstring(raw_data, dtype=numpy.uint8)
2546
+ raw_data = numpy.fromstring(raw_data, dtype=numpy.uint8) # noqa
2547
2547
  unique, data = numpy.unique(raw_data, return_inverse=True)
2548
2548
 
2549
2549
  train_data = data[: -2 * num_test_chars]
@@ -523,7 +523,7 @@ def _get_random_permuted_audio(audio, sample_rate, opts, random_state):
523
523
  warnings.simplefilter("ignore")
524
524
  # Alternative: scipy.interpolate.interp2d
525
525
  factor = random_state.uniform(opts.get("rnd_zoom_lower", 0.9), opts.get("rnd_zoom_upper", 1.1))
526
- audio = scipy.ndimage.zoom(audio, factor, order=opts.get("rnd_zoom_order", 3))
526
+ audio = scipy.ndimage.zoom(audio, factor, order=opts.get("rnd_zoom_order", 3)) # noqa
527
527
  if opts.get("rnd_stretch_switch", 1.0) > 0.0:
528
528
  opts.get("rnd_stretch_lower"), opts.get("rnd_stretch_upper") # Mark as read.
529
529
  if random_state.uniform(0.0, 1.0) < opts.get("rnd_stretch_switch", 0.2):
@@ -123,6 +123,7 @@ def init(name, reference, config, sprint_unit=None, version_number=None, callbac
123
123
 
124
124
  # Start Sprint PythonSegmentOrder interface. {
125
125
 
126
+
126
127
  # Keep names for compatibility.
127
128
  # noinspection PyPep8Naming,PyUnusedLocal
128
129
  def getSegmentList(corpusName, segmentList, config, **kwargs):
@@ -268,7 +269,7 @@ class SprintNnPythonLayer:
268
269
  seg_len = errorSignalIn.shape[1]
269
270
  PythonControl.instance.set_current_seg_error_signal(seg_len=seg_len, error_signal=errorSignalIn.T)
270
271
  # must return a 1-tuple
271
- return (numpy.zeros((self.input_size, seg_len), dtype="float32"),)
272
+ return (numpy.zeros((self.input_size, seg_len), dtype="float32"),) # noqa
272
273
 
273
274
 
274
275
  # End SprintNnPythonLayer. }
@@ -400,7 +401,7 @@ class PythonControl:
400
401
  with self.cond:
401
402
  self.loss = loss
402
403
  self.error_signal = error_signal
403
- self.cond.notifyAll()
404
+ self.cond.notify_all()
404
405
 
405
406
  def _get_loss_and_error_signal_via_sprint_callback(self, seg_name, orthography, posteriors):
406
407
  """
@@ -484,13 +485,13 @@ class PythonControl:
484
485
  self.loss = None
485
486
  self.asked_for_posteriors = False
486
487
  self.notified_for_segment = False
487
- self.cond.notifyAll()
488
+ self.cond.notify_all()
488
489
  loss, error_signal = self.callback("get_loss_and_error_signal", seg_name, seg_len, posteriors)
489
490
  assert error_signal.shape == posteriors.shape
490
491
  with self.cond:
491
492
  self.control_thread__have_new_error_signal = True
492
493
  self.posteriors = None
493
- self.cond.notifyAll()
494
+ self.cond.notify_all()
494
495
  numpy_set_unused(posteriors)
495
496
  error_signal = error_signal.astype("float32", copy=False)
496
497
  return loss, error_signal
@@ -544,14 +545,14 @@ class PythonControl:
544
545
  with self.cond:
545
546
  assert not self.control_loop_started
546
547
  self.control_loop_started = True
547
- self.cond.notifyAll()
548
+ self.cond.notify_all()
548
549
  try:
549
550
  while True:
550
551
  self.handle_next()
551
552
  finally:
552
553
  with self.cond:
553
554
  self.control_loop_exited = True
554
- self.cond.notifyAll()
555
+ self.cond.notify_all()
555
556
 
556
557
  # noinspection PyMethodMayBeStatic
557
558
  def exit(self, **kwargs):
@@ -635,7 +636,7 @@ class PythonControl:
635
636
  with self.cond:
636
637
  assert self.seg_name == segment_name
637
638
  self.notified_for_segment = True
638
- self.cond.notifyAll()
639
+ self.cond.notify_all()
639
640
 
640
641
  def notify_segment_loss(self, segment_name, loss):
641
642
  """
@@ -654,7 +655,7 @@ class PythonControl:
654
655
  assert self.seg_len == seg_len
655
656
  assert self.posteriors.shape[0] == seg_len
656
657
  self.asked_for_posteriors = True
657
- self.cond.notifyAll()
658
+ self.cond.notify_all()
658
659
  return self.posteriors
659
660
 
660
661
  def set_current_seg_error_signal(self, seg_len, error_signal):
@@ -667,7 +668,7 @@ class PythonControl:
667
668
  assert error_signal.ndim == 2
668
669
  assert error_signal.shape[0] == seg_len
669
670
  self.error_signal = error_signal
670
- self.cond.notifyAll()
671
+ self.cond.notify_all()
671
672
 
672
673
  def set_current_seg_loss(self, seg_name, loss):
673
674
  """
@@ -678,7 +679,7 @@ class PythonControl:
678
679
  if seg_name:
679
680
  assert self.seg_name == seg_name
680
681
  self.loss = loss
681
- self.cond.notifyAll()
682
+ self.cond.notify_all()
682
683
 
683
684
  # noinspection PyMethodMayBeStatic
684
685
  def _default_skipped_loss(self):
@@ -702,7 +703,7 @@ class PythonControl:
702
703
  self.loss = self._default_skipped_loss()
703
704
  if self.error_signal is None:
704
705
  self.error_signal = self._default_skipped_error_signal(self.posteriors)
705
- self.cond.notifyAll()
706
+ self.cond.notify_all()
706
707
 
707
708
  def _wait_for_control_loop_error_signal(self):
708
709
  while True:
@@ -102,6 +102,7 @@ def init(name=None, sprint_unit=None, **kwargs):
102
102
  # <editor-fold desc="PythonFeatureScorer">
103
103
  # Start Sprint PythonFeatureScorer interface. {
104
104
 
105
+
105
106
  # noinspection PyShadowingNames
106
107
  def init_python_feature_scorer(config, **kwargs):
107
108
  """
@@ -324,6 +325,7 @@ class PythonFeatureScorer(object):
324
325
  # <editor-fold desc="PythonSegmentOrder">
325
326
  # Start Sprint PythonSegmentOrder interface. {
326
327
 
328
+
327
329
  # Names need to stay that way for compatibility.
328
330
  # noinspection PyPep8Naming
329
331
  def getSegmentList(corpusName, segmentList, **kwargs):
@@ -399,6 +401,7 @@ def getSegmentList(corpusName, segmentList, **kwargs):
399
401
  # <editor-fold desc="PythonTrainer">
400
402
  # Start Sprint PythonTrainer interface. {
401
403
 
404
+
402
405
  # noinspection PyPep8Naming,PyShadowingNames
403
406
  def init_python_trainer(inputDim, outputDim, config, targetMode, **kwargs):
404
407
  """
@@ -996,7 +999,7 @@ def make_criterion_class():
996
999
  errorSignal = None
997
1000
 
998
1001
  def __eq__(self, other):
999
- return type(self) == type(other)
1002
+ return type(self) == type(other) # nopep8
1000
1003
 
1001
1004
  def __hash__(self):
1002
1005
  return hash(type(self))
@@ -395,7 +395,7 @@ class FeedDictDataProvider(DataProviderBase):
395
395
  if enqueue_args is not None:
396
396
  self.queue.put(enqueue_args)
397
397
  with self.state_change_cond:
398
- self.state_change_cond.notifyAll()
398
+ self.state_change_cond.notify_all()
399
399
  self.batches.advance(1)
400
400
 
401
401
  self.reached_end = not self.batches.has_more()
@@ -407,7 +407,7 @@ class FeedDictDataProvider(DataProviderBase):
407
407
  finally:
408
408
  with self.state_change_cond:
409
409
  self.thread_finished = True
410
- self.state_change_cond.notifyAll()
410
+ self.state_change_cond.notify_all()
411
411
 
412
412
  def have_more_data(self, session):
413
413
  """
@@ -230,7 +230,7 @@ class HyperParam:
230
230
  x = 1.0 - eps
231
231
  import scipy.special
232
232
 
233
- return self.dtype(scipy.special.ndtri(x))
233
+ return self.dtype(scipy.special.ndtri(x)) # noqa
234
234
 
235
235
  def get_initial_value(self):
236
236
  """
@@ -6291,9 +6291,11 @@ class ChoiceLayer(BaseChoiceLayer):
6291
6291
  out=self._debug_out,
6292
6292
  x=labels[0],
6293
6293
  args={
6294
- "step": self.network.get_rec_step_index()
6295
- if self.network.have_rec_step_info()
6296
- else tf.constant(-1),
6294
+ "step": (
6295
+ self.network.get_rec_step_index()
6296
+ if self.network.have_rec_step_info()
6297
+ else tf.constant(-1)
6298
+ ),
6297
6299
  "base_beam_in": base_beam_in,
6298
6300
  "scores_in_orig": self.sources[0].output.placeholder,
6299
6301
  "scores_in": scores_in,
@@ -8213,6 +8215,7 @@ class SelfAttentionLayer(_ConcatInputLayer):
8213
8215
  :param tf.Tensor v: [B,H,T,V]
8214
8216
  :param tf.Tensor mask: shape [B,T]
8215
8217
  """
8218
+
8216
8219
  # noinspection PyUnusedLocal,PyShadowingNames
8217
8220
  def _masked_transform_mask(value, prev_value, mask):
8218
8221
  """
@@ -8506,33 +8509,35 @@ class KenLmStateLayer(_ConcatInputLayer):
8506
8509
  tf.less_equal(prev_step, 2),
8507
8510
  lambda: py_print(
8508
8511
  new_rel_scores,
8509
- [
8510
- str(self),
8511
- "; step: ",
8512
- prev_step,
8513
- "; input shape: ",
8514
- tf.shape(self.input_data.placeholder),
8515
- str(self.input_data),
8516
- "; input: ",
8517
- self.input_data.placeholder,
8518
- "; strings shape: ",
8519
- tf.shape(next_strings),
8520
- "; strings: ",
8521
- "'" + next_strings + "'",
8522
- "; new_abs_scores: ",
8523
- new_abs_scores,
8524
- "; sparse rel scores: ",
8525
- new_abs_scores - prev_scores,
8526
- "; min/max/mean rel scores: ",
8527
- tf.reduce_min(new_rel_scores),
8528
- "/",
8529
- tf.reduce_max(new_rel_scores),
8530
- "/",
8531
- tf.reduce_mean(new_rel_scores),
8532
- ]
8533
- + ["; vocab: ", self.tf_vocab]
8534
- if self.tf_vocab is not None
8535
- else [],
8512
+ (
8513
+ [
8514
+ str(self),
8515
+ "; step: ",
8516
+ prev_step,
8517
+ "; input shape: ",
8518
+ tf.shape(self.input_data.placeholder),
8519
+ str(self.input_data),
8520
+ "; input: ",
8521
+ self.input_data.placeholder,
8522
+ "; strings shape: ",
8523
+ tf.shape(next_strings),
8524
+ "; strings: ",
8525
+ "'" + next_strings + "'",
8526
+ "; new_abs_scores: ",
8527
+ new_abs_scores,
8528
+ "; sparse rel scores: ",
8529
+ new_abs_scores - prev_scores,
8530
+ "; min/max/mean rel scores: ",
8531
+ tf.reduce_min(new_rel_scores),
8532
+ "/",
8533
+ tf.reduce_max(new_rel_scores),
8534
+ "/",
8535
+ tf.reduce_mean(new_rel_scores),
8536
+ ]
8537
+ + ["; vocab: ", self.tf_vocab]
8538
+ if self.tf_vocab is not None
8539
+ else []
8540
+ ),
8536
8541
  ),
8537
8542
  lambda: new_rel_scores,
8538
8543
  )
@@ -9897,6 +9902,7 @@ class BaseRNNCell(rnn_cell.RNNCell):
9897
9902
  return x
9898
9903
 
9899
9904
 
9905
+ # noinspection PyAbstractClass
9900
9906
  class VanillaLSTMCell(BaseRNNCell):
9901
9907
  """
9902
9908
  Just a vanilla LSTM cell, which is compatible to our NativeLSTM (v1 and v2).
@@ -9964,6 +9970,7 @@ class VanillaLSTMCell(BaseRNNCell):
9964
9970
  return new_h, rnn_cell.LSTMStateTuple(new_c, new_h)
9965
9971
 
9966
9972
 
9973
+ # noinspection PyAbstractClass
9967
9974
  class RHNCell(BaseRNNCell):
9968
9975
  """
9969
9976
  Recurrent Highway Layer.
@@ -10119,6 +10126,7 @@ class RHNCell(BaseRNNCell):
10119
10126
  return current_state, current_state
10120
10127
 
10121
10128
 
10129
+ # noinspection PyAbstractClass
10122
10130
  class _WrapBaseCell(BaseRNNCell):
10123
10131
  """
10124
10132
  Simpler helper wrapper class, for :class:`BaseRNNCell`.
@@ -10172,6 +10180,7 @@ class _WrapBaseCell(BaseRNNCell):
10172
10180
  return self.cell.call(inputs, state)
10173
10181
 
10174
10182
 
10183
+ # noinspection PyAbstractClass
10175
10184
  class BlocksparseLSTMCell(_WrapBaseCell):
10176
10185
  """
10177
10186
  Standard LSTM but uses OpenAI blocksparse kernels to support bigger matrices.
@@ -10269,6 +10278,7 @@ class BlocksparseLSTMCell(_WrapBaseCell):
10269
10278
  m1["bias"].load(b_new, session=session)
10270
10279
 
10271
10280
 
10281
+ # noinspection PyAbstractClass
10272
10282
  class BlocksparseMultiplicativeMultistepLSTMCell(_WrapBaseCell):
10273
10283
  """
10274
10284
  Multiplicative LSTM with multiple steps, as in the OpenAI blocksparse paper.
@@ -10324,6 +10334,7 @@ class BlocksparseMultiplicativeMultistepLSTMCell(_WrapBaseCell):
10324
10334
  return y
10325
10335
 
10326
10336
 
10337
+ # noinspection PyAbstractClass
10327
10338
  class LayerNormVariantsLSTMCell(BaseRNNCell):
10328
10339
  """LSTM unit with layer normalization and recurrent dropout
10329
10340
 
@@ -10988,6 +10999,7 @@ class TwoDLSTMLayer(LayerBase):
10988
10999
  return y
10989
11000
 
10990
11001
 
11002
+ # noinspection PyAbstractClass
10991
11003
  class ZoneoutLSTMCell(BaseRNNCell):
10992
11004
  """
10993
11005
  Wrapper for tf LSTM to create Zoneout LSTM Cell.
@@ -625,7 +625,7 @@ class MultiChannelMultiResolutionStftLayer(_ConcatInputLayer):
625
625
  # noinspection PyPackageRequirements
626
626
  import scipy.signal
627
627
 
628
- window = tf.constant(scipy.signal.windows.blackman(frame_size), dtype=tf.float32)
628
+ window = tf.constant(scipy.signal.windows.blackman(frame_size), dtype=tf.float32) # noqa
629
629
  elif self._window == "None" or self._window == "ones":
630
630
  window = tf.ones((window_length,), dtype=dtype)
631
631
  else:
@@ -359,9 +359,9 @@ class OpMaker(object):
359
359
  code_set_io += "outputs[%i] = &output_%i;\n" % (out_idx, out_idx)
360
360
  cshape = "TensorShape({%s})" % ", ".join(
361
361
  [
362
- str(dim)
363
- if isinstance(dim, int)
364
- else ("inputs[%i]->dim_size(%i)" % dim) # also see make_dim_str
362
+ (
363
+ str(dim) if isinstance(dim, int) else ("inputs[%i]->dim_size(%i)" % dim)
364
+ ) # also see make_dim_str
365
365
  for dim in v["shape"]
366
366
  ]
367
367
  )
@@ -704,7 +704,7 @@ def load_dump_file(filename):
704
704
  data = _read_bytes()
705
705
  assert len(data) == numpy.prod(dims) * dtype.itemsize
706
706
  # noinspection PyTypeChecker
707
- v_flat = numpy.fromstring(data, dtype=dtype)
707
+ v_flat = numpy.fromstring(data, dtype=dtype) # noqa
708
708
  v = v_flat.reshape(dims)
709
709
  return v
710
710
 
@@ -931,9 +931,11 @@ class Updater(object):
931
931
 
932
932
  grads_and_vars = [
933
933
  (
934
- hvd.allreduce(grad, average=self.config.is_true("horovod_avg_grad"))
935
- if grad is not None
936
- else None,
934
+ (
935
+ hvd.allreduce(grad, average=self.config.is_true("horovod_avg_grad"))
936
+ if grad is not None
937
+ else None
938
+ ),
937
939
  var,
938
940
  )
939
941
  for (grad, var) in grads_and_vars
@@ -1028,6 +1030,7 @@ def accum_grad_multiple_step(grad, var, train_step, num_accum_steps):
1028
1030
  )
1029
1031
 
1030
1032
 
1033
+ # noinspection PyAbstractClass
1031
1034
  class _KerasOptimizerWrapper(Optimizer):
1032
1035
  """
1033
1036
  Wraps a TF optimizer into a standard TF optimizer.
@@ -1090,6 +1093,7 @@ class _KerasOptimizerWrapper(Optimizer):
1090
1093
  return self.keras_optimizer._resource_apply_sparse(grad, handle, indices, None)
1091
1094
 
1092
1095
 
1096
+ # noinspection PyAbstractClass
1093
1097
  class BaseCustomOptimizer(Optimizer):
1094
1098
  """
1095
1099
  Base class for our own optimizer implementations.
@@ -1200,6 +1204,7 @@ class BaseCustomOptimizer(Optimizer):
1200
1204
  return dense
1201
1205
 
1202
1206
 
1207
+ # noinspection PyAbstractClass
1203
1208
  class CustomGradientDescentOptimizer(BaseCustomOptimizer):
1204
1209
  """
1205
1210
  Just an example implementation for simple gradient descent.
@@ -1217,6 +1222,7 @@ class CustomGradientDescentOptimizer(BaseCustomOptimizer):
1217
1222
  return self._assign_sub(ref=var, updates=lr * grad, indices=indices).op
1218
1223
 
1219
1224
 
1225
+ # noinspection PyAbstractClass
1220
1226
  class NormalizedSGD(CustomGradientDescentOptimizer):
1221
1227
  """
1222
1228
  All grads are L2 normalized (via :func:`tf.nn.l2_normalize`), otherwise it's standard SGD.
@@ -1234,6 +1240,7 @@ class NormalizedSGD(CustomGradientDescentOptimizer):
1234
1240
  return super(NormalizedSGD, self)._apply(grad=tf.nn.l2_normalize(grad, None), var=var, indices=indices)
1235
1241
 
1236
1242
 
1243
+ # noinspection PyAbstractClass
1237
1244
  class NeuralOptimizer1(BaseCustomOptimizer):
1238
1245
  """
1239
1246
  Via Neural Optimizer Search with Reinforcement Learning (https://proceedings.mlr.press/v70/bello17a/bello17a.pdf).
@@ -1282,6 +1289,7 @@ class NeuralOptimizer1(BaseCustomOptimizer):
1282
1289
  return tf.group(*[var_update, m_t])
1283
1290
 
1284
1291
 
1292
+ # noinspection PyAbstractClass
1285
1293
  class GradVarianceScaledOptimizer(BaseCustomOptimizer):
1286
1294
  """
1287
1295
  Let m be the running average of g.
@@ -1344,6 +1352,7 @@ class GradVarianceScaledOptimizer(BaseCustomOptimizer):
1344
1352
  return tf.group(*[var_update, m_t])
1345
1353
 
1346
1354
 
1355
+ # noinspection PyAbstractClass
1347
1356
  class NadamOptimizer(tf_compat.v1.train.AdamOptimizer):
1348
1357
  """
1349
1358
  Optimizer that implements the Nadam algorithm.
@@ -1439,6 +1448,7 @@ class NadamOptimizer(tf_compat.v1.train.AdamOptimizer):
1439
1448
  return control_flow_ops.group(*[var_update, m_bar, v_t])
1440
1449
 
1441
1450
 
1451
+ # noinspection PyAbstractClass
1442
1452
  class CustomAdamOptimizer(BaseCustomOptimizer):
1443
1453
  """
1444
1454
  Reimplementation of Adam.
@@ -1515,6 +1525,7 @@ class CustomAdamOptimizer(BaseCustomOptimizer):
1515
1525
  return tf.group(*update_ops + [update_beta1, update_beta2], name=name_scope)
1516
1526
 
1517
1527
 
1528
+ # noinspection PyAbstractClass
1518
1529
  class AMSGradOptimizer(Optimizer):
1519
1530
  """
1520
1531
  https://colab.research.google.com/notebook#fileId=1xXFAuHM2Ae-OmF5M8Cn9ypGCa_HHBgfG&scrollTo=N1-2wPHN1Otn
@@ -1555,7 +1566,7 @@ class AMSGradOptimizer(Optimizer):
1555
1566
  learning_rate /= tf.sqrt(self.t)
1556
1567
  update_ops = []
1557
1568
 
1558
- for (g, var) in gradient_variables:
1569
+ for g, var in gradient_variables:
1559
1570
  m = self.m[var].assign(self.beta1 * self.m[var] + (1 - self.beta1) * g)
1560
1571
  v = self.v[var].assign(self.beta2 * self.v[var] + (1 - self.beta2) * g * g)
1561
1572
  v_hat = self.v_hat[var].assign(tf.maximum(self.v_hat[var], v))
@@ -2271,7 +2271,11 @@ def constant_with_shape(x, shape, dtype=None, name="constant_with_shape"):
2271
2271
  :rtype: tf.Tensor
2272
2272
  """
2273
2273
  with tf.name_scope(name):
2274
- if type(x) in [int, float, bool] and type(shape) in [list, tuple] and all([type(d) == int for d in shape]):
2274
+ if (
2275
+ isinstance(x, (int, float, bool))
2276
+ and isinstance(shape, (list, tuple))
2277
+ and all(isinstance(d, int) for d in shape)
2278
+ ):
2275
2279
  if dtype is None:
2276
2280
  dtype = {int: tf.int32, float: tf.float32, bool: tf.bool}[type(x)]
2277
2281
  if x in (0, 0.0, False):
@@ -3033,9 +3037,9 @@ class OpCodeCompiler(NativeCodeCompiler):
3033
3037
  "tf_version": describe_tensorflow_version(),
3034
3038
  "with_cuda": self._with_cuda(),
3035
3039
  "cuda_path": self._cuda_env.cuda_path if self._with_cuda() else None,
3036
- "nvcc_opts": (tuple(self._cuda_env.get_compiler_opts()) + tuple(self._nvcc_opts))
3037
- if self._with_cuda()
3038
- else None,
3040
+ "nvcc_opts": (
3041
+ (tuple(self._cuda_env.get_compiler_opts()) + tuple(self._nvcc_opts)) if self._with_cuda() else None
3042
+ ),
3039
3043
  }
3040
3044
  )
3041
3045
  return d
@@ -3161,7 +3165,7 @@ def make_var_tuple(v):
3161
3165
  :rtype: tuple[tf.Tensor]
3162
3166
  """
3163
3167
  if isinstance(v, (int, float, tf.Tensor, tf.Operation)):
3164
- return (v,)
3168
+ return (v,) # noqa
3165
3169
  if isinstance(v, list):
3166
3170
  return tuple(v)
3167
3171
  assert isinstance(v, tuple)
@@ -5073,7 +5077,7 @@ def simplify_add(a, b):
5073
5077
  if isinstance(b, int):
5074
5078
  b = numpy.int32(b) # use right type
5075
5079
  if isinstance(b, float):
5076
- b = numpy.float(b)
5080
+ b = numpy.float(b) # noqa
5077
5081
  if a.op.type in {"Add", "AddV2"}:
5078
5082
  a_dyn_parts = []
5079
5083
  a_const_parts = [b] if numpy.count_nonzero(b) > 0 else []
@@ -5469,10 +5473,7 @@ def gaussian_kernel_2d(size, std):
5469
5473
  if isinstance(size, (tuple, list)):
5470
5474
  size_x, size_y = size
5471
5475
  else:
5472
- size_x, size_y, = (
5473
- size,
5474
- size,
5475
- )
5476
+ size_x, size_y = size, size
5476
5477
  if isinstance(std, (tuple, list)):
5477
5478
  std_x, std_y = std
5478
5479
  else:
@@ -39,6 +39,7 @@ _MarkedDim = MarkedDim
39
39
  # Provide some simple wrappers. https://github.com/rwth-i6/returnn/issues/782
40
40
  # Use CamelCase function names (invalidates PEP8) to make it look like a class instance.
41
41
 
42
+
42
43
  # noinspection PyPep8Naming
43
44
  def FeatureDim(description, dimension, **kwargs):
44
45
  """
@@ -324,7 +325,7 @@ class BatchInfo:
324
325
  # We want to get a reasonable order.
325
326
  same_type_last_idx = None
326
327
  for i, dim_ in enumerate(all_virtual_dims):
327
- if type(dim_) == type(dim):
328
+ if type(dim_) == type(dim): # noqa
328
329
  same_type_last_idx = i
329
330
  if same_type_last_idx is not None:
330
331
  all_virtual_dims.insert(same_type_last_idx + 1, dim)
@@ -305,9 +305,19 @@ def create_data_loader_from_batches(
305
305
  """
306
306
  if loader_opts is None:
307
307
  loader_opts: Dict[str, Any] = {}
308
+ else:
309
+ loader_opts = loader_opts.copy()
310
+
311
+ # Make sure to use workers unless specified otherwise to ensure reasonable GPU
312
+ # utilization and to work around some issues wrt. overflowing ulimits when
313
+ # workers are non-persistent.
314
+ #
315
+ # See for context:
316
+ # - https://github.com/rwth-i6/returnn/issues/1560
317
+ # - https://github.com/pytorch/pytorch/issues/129868
318
+ loader_opts.setdefault("num_workers", 1)
308
319
 
309
320
  if loader_opts.get("num_workers"):
310
- loader_opts = loader_opts.copy()
311
321
  loader_opts.setdefault("persistent_workers", True)
312
322
  loader_opts["worker_init_fn"] = _DataLoaderWorkerInitFunc(
313
323
  other_worker_init_fn=loader_opts.get("worker_init_fn")
@@ -1204,7 +1204,7 @@ def obj_diff_list(self, other, **kwargs):
1204
1204
  return ["%sself is None and other is %r" % (prefix, other)]
1205
1205
  if self is not None and other is None:
1206
1206
  return ["%sother is None and self is %r" % (prefix, self)]
1207
- if type(self) != type(other):
1207
+ if type(self) != type(other): # noqa
1208
1208
  return ["%stype diff: self is %s but other is %s" % (prefix, type(self).__name__, type(other).__name__)]
1209
1209
 
1210
1210
  if allowed_mapping:
@@ -1349,7 +1349,7 @@ def init_thread_join_hack():
1349
1349
  # https://github.com/albertz/playground/blob/master/thread-join-block.py
1350
1350
  # https://github.com/albertz/playground/blob/master/cond-wait-block.py
1351
1351
  return
1352
- main_thread = threading.currentThread()
1352
+ main_thread = threading.current_thread()
1353
1353
  # noinspection PyUnresolvedReferences,PyProtectedMember
1354
1354
  assert isinstance(main_thread, threading._MainThread)
1355
1355
  main_thread_id = thread.get_ident()
@@ -1487,7 +1487,7 @@ def interrupt_main():
1487
1487
  :return: nothing
1488
1488
  """
1489
1489
  # noinspection PyProtectedMember,PyUnresolvedReferences
1490
- is_main_thread = isinstance(threading.currentThread(), threading._MainThread)
1490
+ is_main_thread = isinstance(threading.current_thread(), threading._MainThread)
1491
1491
  if is_quitting(): # ignore if we are already quitting
1492
1492
  if is_main_thread: # strange to get again in main thread
1493
1493
  raise Exception("interrupt_main() from main thread while already quitting")
@@ -1442,7 +1442,7 @@ def dump_all_thread_tracebacks(exclude_thread_ids=None, file=None):
1442
1442
  thread = threads.get(tid)
1443
1443
  if thread:
1444
1444
  assert isinstance(thread, threading.Thread)
1445
- if thread is threading.currentThread():
1445
+ if thread is threading.current_thread():
1446
1446
  tags += ["current"]
1447
1447
  # noinspection PyProtectedMember,PyUnresolvedReferences
1448
1448
  if isinstance(thread, threading._MainThread):
@@ -75,7 +75,7 @@ def dump_all_thread_tracebacks(exclude_thread_ids=None, exclude_self=False):
75
75
  thread_ = threads.get(tid)
76
76
  if thread_:
77
77
  assert isinstance(thread_, threading.Thread)
78
- if thread_ is threading.currentThread():
78
+ if thread_ is threading.current_thread():
79
79
  tags += ["current"]
80
80
  # noinspection PyUnresolvedReferences,PyProtectedMember
81
81
  if isinstance(thread_, threading._MainThread):
@@ -136,7 +136,7 @@ def init_better_exchook():
136
136
  # noinspection PyBroadException
137
137
  try:
138
138
  # noinspection PyUnresolvedReferences,PyProtectedMember
139
- is_main_thread = isinstance(threading.currentThread(), threading._MainThread)
139
+ is_main_thread = isinstance(threading.current_thread(), threading._MainThread)
140
140
  except Exception: # Can happen at a very late state while quitting.
141
141
  if exc_type is KeyboardInterrupt:
142
142
  return
@@ -147,12 +147,12 @@ def init_better_exchook():
147
147
  return
148
148
  # An unhandled exception in the main thread. This means that we are going to quit now.
149
149
  sys.exited = True
150
- print("Unhandled exception %s in thread %s, proc %i." % (exc_type, threading.currentThread(), os.getpid()))
150
+ print("Unhandled exception %s in thread %s, proc %i." % (exc_type, threading.current_thread(), os.getpid()))
151
151
  if exc_type is KeyboardInterrupt:
152
152
  return
153
153
 
154
154
  # noinspection PyUnresolvedReferences,PyProtectedMember
155
- if isinstance(threading.currentThread(), threading._MainThread):
155
+ if isinstance(threading.current_thread(), threading._MainThread):
156
156
  main_thread_id = thread.get_ident()
157
157
  if not isinstance(exc_type, Exception):
158
158
  # We are the main thread and we got an exit-exception. This is likely fatal.
@@ -403,7 +403,7 @@ def init_ipython_kernel():
403
403
  # Do in mainthread to avoid history sqlite DB errors at exit.
404
404
  # https://github.com/ipython/ipython/issues/680
405
405
  # noinspection PyUnresolvedReferences,PyProtectedMember
406
- assert isinstance(threading.currentThread(), threading._MainThread)
406
+ assert isinstance(threading.current_thread(), threading._MainThread)
407
407
  try:
408
408
  ip = socket.gethostbyname(socket.gethostname())
409
409
  connection_file = "ipython-kernel-%s-%s.json" % (ip, os.getpid())
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: returnn
3
- Version: 1.20240628.104058
3
+ Version: 1.20240702.142741
4
4
  Summary: The RWTH extensible training framework for universal recurrent neural networks
5
5
  Home-page: https://github.com/rwth-i6/returnn/
6
6
  Author: Albert Zeyer