returnn 1.20250430.145858__tar.gz → 1.20250508.181644__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of returnn might be problematic.

Files changed (476)
  1. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/CONTRIBUTING.md +1 -1
  2. {returnn-1.20250430.145858/returnn.egg-info → returnn-1.20250508.181644}/PKG-INFO +1 -1
  3. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/__init__.py +0 -1
  4. returnn-1.20250508.181644/_setup_info_generated.py +2 -0
  5. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-search-compiled-graph.py +6 -8
  6. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/pyproject.toml +4 -0
  7. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/basic.py +24 -25
  8. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/cached.py +4 -3
  9. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/distrib_files.py +1 -2
  10. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/generating.py +20 -20
  11. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/hdf.py +9 -9
  12. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/lm.py +25 -13
  13. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/meta.py +39 -38
  14. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/normalization_data.py +1 -1
  15. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/postprocessing.py +9 -9
  16. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/sprint.py +8 -7
  17. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/util/strings.py +0 -1
  18. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/util/vocabulary.py +3 -3
  19. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/graph_editor/subgraph.py +1 -2
  20. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/graph_editor/transform.py +1 -2
  21. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/graph_editor/util.py +1 -2
  22. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_backend.py +4 -3
  23. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_utils.py +1 -1
  24. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/audio/mel.py +0 -1
  25. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/const.py +3 -3
  26. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/device.py +0 -1
  27. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/dropout.py +1 -1
  28. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/encoder/e_branchformer.py +1 -1
  29. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/loop.py +3 -3
  30. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/loss.py +0 -1
  31. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/matmul.py +0 -1
  32. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/run_ctx.py +9 -9
  33. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/signal.py +0 -1
  34. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/types.py +2 -4
  35. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/native_op.py +13 -0
  36. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/sprint/cache.py +2 -4
  37. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/sprint/interface.py +3 -4
  38. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/_dim_extra.py +9 -9
  39. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/_tensor_extra.py +20 -19
  40. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/_tensor_op_overloads.py +0 -1
  41. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/tensor.py +1 -1
  42. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/tensor_dict.py +9 -9
  43. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/engine.py +60 -65
  44. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/_backend.py +3 -3
  45. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/cond.py +6 -6
  46. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/debug_eager_mode.py +0 -1
  47. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/layer.py +12 -12
  48. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/loop.py +3 -3
  49. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/make_layer.py +0 -1
  50. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/layers/base.py +56 -49
  51. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/layers/basic.py +60 -65
  52. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/layers/rec.py +74 -74
  53. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/native_op.py +1 -3
  54. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/network.py +60 -57
  55. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/updater.py +3 -3
  56. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/util/basic.py +24 -23
  57. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/data/extern_data.py +4 -5
  58. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/data/pipeline.py +3 -4
  59. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/engine.py +16 -16
  60. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/frontend/_backend.py +15 -15
  61. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/frontend/bridge.py +3 -3
  62. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/updater.py +8 -9
  63. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/util/debug_inf_nan.py +0 -2
  64. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/util/exception_helper.py +1 -1
  65. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/util/scaled_gradient.py +0 -1
  66. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/basic.py +1 -2
  67. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/better_exchook.py +14 -0
  68. {returnn-1.20250430.145858 → returnn-1.20250508.181644/returnn.egg-info}/PKG-INFO +1 -1
  69. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/_setup_test_env.py +0 -1
  70. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/rf_utils.py +3 -3
  71. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_Dataset.py +0 -1
  72. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TFNativeOp.py +2 -6
  73. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TFNetworkLayer.py +3 -3
  74. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TFNetworkRecLayer.py +5 -7
  75. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TranslationDataset.py +8 -11
  76. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_torch_frontend.py +1 -3
  77. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/dump-dataset.py +3 -3
  78. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/hdf_dump_translation_dataset.py +9 -11
  79. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/tf_avg_checkpoints.py +1 -1
  80. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/tf_inspect_checkpoint.py +1 -1
  81. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/torch_export_to_onnx.py +12 -12
  82. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/torch_scale_tuning.py +1 -1
  83. returnn-1.20250430.145858/_setup_info_generated.py +0 -2
  84. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/.editorconfig +0 -0
  85. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/.gitignore +0 -0
  86. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/.gitmodules +0 -0
  87. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/.kateconfig +0 -0
  88. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/CHANGELOG.md +0 -0
  89. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/CODEOWNERS +0 -0
  90. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/LICENSE +0 -0
  91. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/MANIFEST.in +0 -0
  92. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/README.rst +0 -0
  93. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/12AX.cluster_map +0 -0
  94. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/_setup_returnn_env.py +0 -0
  95. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-fwd.config +0 -0
  96. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-horovod-mpi.py +0 -0
  97. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-horovod-mpi.py.sh +0 -0
  98. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-horovod-mpi.sh +0 -0
  99. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-hyper-param-tuning.config +0 -0
  100. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-iter-dataset.py +0 -0
  101. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-list-devices.py +0 -0
  102. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-lua-torch-layer.config +0 -0
  103. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-pretrain.config +0 -0
  104. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-record-and-push-to-webserver.py +0 -0
  105. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-returnn-as-framework.py +0 -0
  106. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-rf-pt-benchmark.py +0 -0
  107. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-rf.config +0 -0
  108. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-rhn-enwik8.config +0 -0
  109. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-sprint-interface.py +0 -0
  110. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-att-copy.config +0 -0
  111. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-attention.config +0 -0
  112. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-chunking-blstm.12ax.config +0 -0
  113. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-contribrnn-lstm.12ax.config +0 -0
  114. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-enc-dec.config +0 -0
  115. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-hard-att-copy.config +0 -0
  116. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-lstm-benchmark.py +0 -0
  117. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-maxgradnorm-lstm.12ax.config +0 -0
  118. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-native-lstm-lowmem.12ax.config +0 -0
  119. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-native-lstm.12ax.config +0 -0
  120. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-native-lstm2.12ax.config +0 -0
  121. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-native-lstm2.12ax.tuned.config +0 -0
  122. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-neural-transducer.12ax.config +0 -0
  123. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-rec-explicit-lstm.config +0 -0
  124. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-rec-explicit-rnn.config +0 -0
  125. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-rec-self-att.config +0 -0
  126. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-tf-vanilla-lstm.12ax.config +0 -0
  127. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-timit-lstm-ctc.config +0 -0
  128. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-torch.config +0 -0
  129. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo-upd-mult-model.lstm.12ax.config +0 -0
  130. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/demo.sh +0 -0
  131. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/IAM_lines/a01-000u-00.png +0 -0
  132. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/IAM_lines/a01-007-04.png +0 -0
  133. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/IAM_lines/a01-007-06.png +0 -0
  134. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/README.txt +0 -0
  135. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/chars.txt +0 -0
  136. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/config_demo +0 -0
  137. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/config_fwd +0 -0
  138. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/config_real +0 -0
  139. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/create_IAM_dataset.py +0 -0
  140. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/decode.py +0 -0
  141. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/features/raw/demo.h5 +0 -0
  142. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/go.sh +0 -0
  143. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/lines.txt +0 -0
  144. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/split/eval.txt +0 -0
  145. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/split/train.txt +0 -0
  146. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/IAM/split/valid.txt +0 -0
  147. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/README.md +0 -0
  148. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/artificial/create_test_h5.py +0 -0
  149. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/artificial/forwardconfig +0 -0
  150. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/artificial/go.sh +0 -0
  151. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/artificial/trainconfig +0 -0
  152. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/artificial_rgb/create_test_h5.py +0 -0
  153. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/artificial_rgb/forwardconfig +0 -0
  154. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/artificial_rgb/go.sh +0 -0
  155. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/demos/mdlstm/artificial_rgb/trainconfig +0 -0
  156. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/requirements.txt +0 -0
  157. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/__init__.py +0 -0
  158. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/__main__.py +0 -0
  159. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/__old_mod_loader__.py +0 -0
  160. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/__setup__.py +0 -0
  161. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/config.py +0 -0
  162. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/__init__.py +0 -0
  163. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/audio.py +0 -0
  164. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/bundle_file.py +0 -0
  165. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/cached2.py +0 -0
  166. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/map.py +0 -0
  167. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/multi_proc.py +0 -0
  168. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/numpy_dump.py +0 -0
  169. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/raw_wav.py +0 -0
  170. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/stereo.py +0 -0
  171. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/text_dict.py +0 -0
  172. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/util/__init__.py +0 -0
  173. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/datasets/util/feature_extraction.py +0 -0
  174. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/engine/__init__.py +0 -0
  175. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/engine/base.py +0 -0
  176. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/engine/batch.py +0 -0
  177. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/__init__.py +0 -0
  178. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/__main__.py +0 -0
  179. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/.git +0 -0
  180. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/.gitignore +0 -0
  181. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/LICENSE +0 -0
  182. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/README.md +0 -0
  183. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/aligner.gif +0 -0
  184. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/check.png +0 -0
  185. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/core.cu +0 -0
  186. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/core.h +0 -0
  187. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/core_cpu.cpp +0 -0
  188. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/LICENSE +0 -0
  189. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/MANIFEST.in +0 -0
  190. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/README.md +0 -0
  191. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/binding.cpp +0 -0
  192. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.cu +0 -0
  193. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.h +0 -0
  194. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/requirements.txt +0 -0
  195. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/setup.py +0 -0
  196. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/__init__.py +0 -0
  197. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/test.py +0 -0
  198. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/ref_rna.py +0 -0
  199. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/setup.py +0 -0
  200. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op.cc +0 -0
  201. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op_kernel_tmpl.h +0 -0
  202. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/warp_rna/__init__.py +0 -0
  203. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/WarpRna/warp-rna/test.cpp +0 -0
  204. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/__init__.py +0 -0
  205. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/graph_editor/README.md +0 -0
  206. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/graph_editor/__init__.py +0 -0
  207. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/graph_editor/edit.py +0 -0
  208. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/graph_editor/reroute.py +0 -0
  209. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/extern/graph_editor/select.py +0 -0
  210. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/forward_iface.py +0 -0
  211. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/__init__.py +0 -0
  212. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_cache.py +0 -0
  213. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_native/__init__.py +0 -0
  214. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_native/backend.cpp +0 -0
  215. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_native/backend.hpp +0 -0
  216. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_native/module.cpp +0 -0
  217. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_native/module.hpp +0 -0
  218. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_native/py_utils.hpp +0 -0
  219. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_native/tensor_ops.cpp +0 -0
  220. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_native/tensor_ops.hpp +0 -0
  221. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_numpy_backend.py +0 -0
  222. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/_random_journal.py +0 -0
  223. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/array_.py +0 -0
  224. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/attention.py +0 -0
  225. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/audio/__init__.py +0 -0
  226. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/audio/specaugment.py +0 -0
  227. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/backend.py +0 -0
  228. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/build_from_dict.py +0 -0
  229. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/cond.py +0 -0
  230. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/container.py +0 -0
  231. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/control_flow_ctx.py +0 -0
  232. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/conv.py +0 -0
  233. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/conversions/__init__.py +0 -0
  234. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/conversions/espnet_e_branchformer.py +0 -0
  235. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/conversions/hf_llama.py +0 -0
  236. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/conversions/torch_nn.py +0 -0
  237. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/decoder/__init__.py +0 -0
  238. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/decoder/transformer.py +0 -0
  239. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/dims.py +0 -0
  240. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/dtype.py +0 -0
  241. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/encoder/__init__.py +0 -0
  242. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/encoder/base.py +0 -0
  243. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/encoder/conformer.py +0 -0
  244. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/encoder/conformer_v2.py +0 -0
  245. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/encoder/transformer.py +0 -0
  246. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/gradient.py +0 -0
  247. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/graph.py +0 -0
  248. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/hooks.py +0 -0
  249. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/init.py +0 -0
  250. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/label_smoothing.py +0 -0
  251. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/linear.py +0 -0
  252. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/math_.py +0 -0
  253. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/module.py +0 -0
  254. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/nested.py +0 -0
  255. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/normalization.py +0 -0
  256. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/parameter.py +0 -0
  257. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/parametrizations.py +0 -0
  258. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/parametrize.py +0 -0
  259. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/piecewise_linear.py +0 -0
  260. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/rand.py +0 -0
  261. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/rec.py +0 -0
  262. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/reduce.py +0 -0
  263. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/state.py +0 -0
  264. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/stepwise_scheduler.py +0 -0
  265. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/frontend/tensor_array.py +0 -0
  266. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/import_/__init__.py +0 -0
  267. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/import_/common.py +0 -0
  268. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/import_/git.py +0 -0
  269. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/import_/import_.py +0 -0
  270. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/learning_rate_control.py +0 -0
  271. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/log.py +0 -0
  272. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/native_op.cpp +0 -0
  273. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/pretrain.py +0 -0
  274. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/sprint/__init__.py +0 -0
  275. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/sprint/control.py +0 -0
  276. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/sprint/error_signals.py +0 -0
  277. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/sprint/extern_interface.py +0 -0
  278. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/README.md +0 -0
  279. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/__init__.py +0 -0
  280. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/_tensor_mixin_base.py +0 -0
  281. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/control_flow_ctx.py +0 -0
  282. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/dim.py +0 -0
  283. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/marked_dim.py +0 -0
  284. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tensor/utils.py +0 -0
  285. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/__init__.py +0 -0
  286. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/compat.py +0 -0
  287. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/data_pipeline.py +0 -0
  288. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/distributed.py +0 -0
  289. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/README.md +0 -0
  290. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/__init__.py +0 -0
  291. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/_utils.py +0 -0
  292. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/config_entry_points.py +0 -0
  293. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/dims.py +0 -0
  294. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/masked_computation.py +0 -0
  295. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/parameter_assign.py +0 -0
  296. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_layers/prev_tensor_ref.py +0 -0
  297. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_low_level/__init__.py +0 -0
  298. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/frontend_low_level/_backend.py +0 -0
  299. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/horovod.py +0 -0
  300. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/hyper_param_tuning.py +0 -0
  301. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/layers/__init__.py +0 -0
  302. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/layers/segmental_model.py +0 -0
  303. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/layers/signal_processing.py +0 -0
  304. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/layers/variable.py +0 -0
  305. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/sprint.py +0 -0
  306. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/util/__init__.py +0 -0
  307. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/util/data.py +0 -0
  308. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/util/gradient_checkpoint.py +0 -0
  309. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/util/ken_lm.py +0 -0
  310. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/tf/util/open_fst.py +0 -0
  311. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/README.md +0 -0
  312. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/__init__.py +0 -0
  313. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/data/__init__.py +0 -0
  314. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/data/queued_data_iter.py +0 -0
  315. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/data/returnn_dataset_wrapper.py +0 -0
  316. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/data/tensor_utils.py +0 -0
  317. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/distributed.py +0 -0
  318. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/frontend/__init__.py +0 -0
  319. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/frontend/_rand.py +0 -0
  320. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/frontend/raw_ops.py +0 -0
  321. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/optim/README.md +0 -0
  322. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/optim/__init__.py +0 -0
  323. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/optim/lion.py +0 -0
  324. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/util/README.md +0 -0
  325. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/util/__init__.py +0 -0
  326. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/util/array_.py +0 -0
  327. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/util/diagnose_gpu.py +0 -0
  328. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/util/gradient_checkpoint.py +0 -0
  329. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/torch/util/module.py +0 -0
  330. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/__init__.py +0 -0
  331. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/bpe.py +0 -0
  332. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/debug.py +0 -0
  333. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/debug_helpers.py +0 -0
  334. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/file_cache.py +0 -0
  335. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/fsa.py +0 -0
  336. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/literal_py_to_pickle.py +0 -0
  337. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/lru_cache.py +0 -0
  338. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/math.py +0 -0
  339. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/multi_proc_non_daemonic_spawn.py +0 -0
  340. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/native_code_compiler.py +0 -0
  341. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/pprint.py +0 -0
  342. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/py-to-pickle.cpp +0 -0
  343. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/py_ext_mod_compiler.py +0 -0
  344. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/result_with_reason.py +0 -0
  345. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/sig_proc.py +0 -0
  346. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/task_system.py +0 -0
  347. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/train_proc_manager.py +0 -0
  348. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn/util/watch_memory.py +0 -0
  349. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn.egg-info/SOURCES.txt +0 -0
  350. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn.egg-info/dependency_links.txt +0 -0
  351. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn.egg-info/requires.txt +0 -0
  352. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/returnn.egg-info/top_level.txt +0 -0
  353. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/rnn.py +0 -0
  354. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/setup.cfg +0 -0
  355. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/setup.py +0 -0
  356. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/DummySprintExec.py +0 -0
  357. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm-inspection-profile.xml +0 -0
  358. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/.gitignore +0 -0
  359. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/.name +0 -0
  360. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/codeStyleSettings.xml +0 -0
  361. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/codeStyles/Project.xml +0 -0
  362. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/codeStyles/codeStyleConfig.xml +0 -0
  363. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/inspectionProfiles/Project_Default.xml +0 -0
  364. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/inspectionProfiles/profiles_settings.xml +0 -0
  365. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/misc.xml +0 -0
  366. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/modules.xml +0 -0
  367. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/returnn.iml +0 -0
  368. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/PyCharm.idea/scopes/scope_settings.xml +0 -0
  369. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/_set_num_threads1.py +0 -0
  370. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/_setup_returnn_env.py +0 -0
  371. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/bpe-unicode-demo.codes +0 -0
  372. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/bpe-unicode-demo.vocab +0 -0
  373. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/lexicon_opt.fst +0 -0
  374. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/lexicon_opt.isyms +0 -0
  375. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/lexicon_opt.jpg +0 -0
  376. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/lexicon_opt.osyms +0 -0
  377. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/lint_common.py +0 -0
  378. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/pycharm-inspect.py +0 -0
  379. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/pylint.py +0 -0
  380. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/returnn-as-framework.py +0 -0
  381. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/spelling.dic +0 -0
  382. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_Config.py +0 -0
  383. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_Fsa.py +0 -0
  384. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_GeneratingDataset.py +0 -0
  385. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_HDFDataset.py +0 -0
  386. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_LearningRateControl.py +0 -0
  387. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_Log.py +0 -0
  388. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_MultiProcDataset.py +0 -0
  389. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_Pretrain.py +0 -0
  390. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_ResNet.py +0 -0
  391. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_SprintDataset.py +0 -0
  392. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_SprintInterface.py +0 -0
  393. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TFEngine.py +0 -0
  394. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TFNetworkSigProcLayer.py +0 -0
  395. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TFUpdater.py +0 -0
  396. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TFUtil.py +0 -0
  397. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TF_determinism.py +0 -0
  398. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TaskSystem.py +0 -0
  399. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_TaskSystem_SharedMem.py +0 -0
  400. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_Util.py +0 -0
  401. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_demos.py +0 -0
  402. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_fork_exec.py +0 -0
  403. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_hdf_dump.py +0 -0
  404. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_array.py +0 -0
  405. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_attention.py +0 -0
  406. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_base.py +0 -0
  407. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_cond.py +0 -0
  408. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_const.py +0 -0
  409. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_container.py +0 -0
  410. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_conv.py +0 -0
  411. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_decoder_transformer.py +0 -0
  412. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_encoder_conformer.py +0 -0
  413. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_gradient.py +0 -0
  414. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_label_smoothing.py +0 -0
  415. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_loop.py +0 -0
  416. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_math.py +0 -0
  417. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_normalization.py +0 -0
  418. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_piecewise_linear.py +0 -0
  419. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_rec.py +0 -0
  420. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_reduce.py +0 -0
  421. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_rf_signal.py +0 -0
  422. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_tensor.py +0 -0
  423. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_threading.py +0 -0
  424. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_tools.py +0 -0
  425. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_torch_dataset.py +0 -0
  426. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_torch_engine.py +0 -0
  427. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_torch_internal_frontend.py +0 -0
  428. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/test_torch_util.py +0 -0
  429. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tests/torch_utils.py +0 -0
  430. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/_setup_returnn_env.py +0 -0
  431. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/analyze-dataset-batches.py +0 -0
  432. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/bliss-collect-seq-lens.py +0 -0
  433. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/bliss-dump-text.py +0 -0
  434. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/bliss-get-segment-names.py +0 -0
  435. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/bliss-to-ogg-zip.py +0 -0
  436. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/bpe-create-lexicon.py +0 -0
  437. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/calculate-word-error-rate.py +0 -0
  438. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/cleanup-old-models.py +0 -0
  439. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/collect-orth-symbols.py +0 -0
  440. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/collect-words.py +0 -0
  441. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/compile_native_op.py +0 -0
  442. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/compile_tf_graph.py +0 -0
  443. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/debug-dump-search-scores.py +0 -0
  444. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/debug-plot-search-scores.py +0 -0
  445. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/dump-dataset-raw-strings.py +0 -0
  446. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/dump-forward-stats.py +0 -0
  447. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/dump-forward.py +0 -0
  448. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/dump-network-json.py +0 -0
  449. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/dump-pickle.py +0 -0
  450. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/extract_state_tying_from_dataset.py +0 -0
  451. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/get-attention-weights.py +0 -0
  452. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/get-best-model-epoch.py +0 -0
  453. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/hdf_dump.py +0 -0
  454. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/import-blocks-mt-model.py +0 -0
  455. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/import-t2t-mt-model.py +0 -0
  456. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/.gitignore +0 -0
  457. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/Makefile +0 -0
  458. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/README.md +0 -0
  459. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/example/README.md +0 -0
  460. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/example/libs_list +0 -0
  461. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.config +0 -0
  462. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.keep_over_epoch.lstm2.config +0 -0
  463. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/example/rescore_lattice.sh +0 -0
  464. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/example/state_vars_list +0 -0
  465. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/example/tensor_names_list +0 -0
  466. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/file.h +0 -0
  467. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/htklatticerescorer.cc +0 -0
  468. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/htklatticerescorer.h +0 -0
  469. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/main.cc +0 -0
  470. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/rescorer.h +0 -0
  471. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/vocabulary.cc +0 -0
  472. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/lattice_rescorer/vocabulary.h +0 -0
  473. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/tf_inspect_summary_log.py +0 -0
  474. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/torch_avg_checkpoints.py +0 -0
  475. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/torch_inspect_checkpoint.py +0 -0
  476. {returnn-1.20250430.145858 → returnn-1.20250508.181644}/tools/torch_inspect_checkpoint_and_opt.py +0 -0

CONTRIBUTING.md
@@ -18,7 +18,7 @@ General rules when contributing to the code of RETURNN:
  Our code style uses most common Python conventions.
  If you are not an expert in Python, use PyCharm,
  and follow [our PyCharm configuration guide](https://github.com/rwth-i6/returnn/wiki/PyCharm-Configuration).
- Apply [black](https://black.readthedocs.io/).
+ Apply [ruff](https://github.com/astral-sh/ruff).
  * Make sure all [tests](https://returnn.readthedocs.io/en/latest/advanced/test_suite.html) pass.
  * At the time being, we want to support earlier versions of TF 1
  (consider at least TF 1.8, but maybe even TF 1.4)

PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: returnn
- Version: 1.20250430.145858
+ Version: 1.20250508.181644
  Summary: The RWTH extensible training framework for universal recurrent neural networks
  Home-page: https://github.com/rwth-i6/returnn/
  Author: Albert Zeyer

__init__.py
@@ -7,7 +7,6 @@ This was used for the old flat code file structure, for usage like::
  We want to support the same code.
  """

-
  from __future__ import annotations
  import os
  import sys

_setup_info_generated.py (new file)
@@ -0,0 +1,2 @@
+ version = '1.20250508.181644'
+ long_version = '1.20250508.181644+git.0baf1d9'

demos/demo-tf-search-compiled-graph.py
@@ -8,12 +8,12 @@ This is just for demonstration, testing and debugging purpose. The search itself

  # No RETURNN dependency needed for the basic search. Just TF itself.

- import typing
  import os
  import json
  import argparse
  import tensorflow as tf
  import numpy
+ from typing import List, Optional, Tuple


  class Hyp:
@@ -26,7 +26,7 @@ class Hyp:
  :param int idx: hyp idx (to identify it in a beam)
  """
  self.idx = idx
- self.source_idx = None # type: typing.Optional[int] # source hyp idx
+ self.source_idx: Optional[int] = None # source hyp idx
  self.score = 0.0
  self.seq = [] # label seq

@@ -91,7 +91,6 @@ def main():
  # Now loop over decoder steps.
  max_dec_len = 100 # TODO better default... depending on input len. or configurable...
  for i in range(max_dec_len):
-
  # Loop over all stochastic variables.
  for stochastic_var in info["stochastic_var_order"]:
  assert isinstance(stochastic_var, str)
@@ -108,9 +107,7 @@ def main():
  # TODO: length norm here?

  # Select new hypotheses.
- best_possibilities = sorted(all_possibilities)[
- : args.beam_size
- ] # type: typing.List[typing.Tuple[float,int,Hyp]]
+ best_possibilities: List[Tuple[float, int, Hyp]] = sorted(all_possibilities)[: args.beam_size]
  assert len(best_possibilities) == args.beam_size
  hyps = [
  hyp.expand(idx=i, label=label, score=score)
@@ -121,8 +118,9 @@ def main():
  session.run(
  info["state_vars"]["stochastic_var_scores_%s" % stochastic_var] + "/Assign...?", # TODO...
  feed_dict={
- info["state_vars"]["stochastic_var_scores_%s" % stochastic_var]
- + "/Initial...?": [[hyp.seq[-1] for hyp in hyps]] # TODO...
+ info["state_vars"]["stochastic_var_scores_%s" % stochastic_var] + "/Initial...?": [
+ [hyp.seq[-1] for hyp in hyps]
+ ] # TODO...
  },
  )


pyproject.toml
@@ -13,5 +13,9 @@ extend-exclude = '''
  )/
  '''

+ [tool.ruff]
+ line-length = 120
+ target-version = "py38" # https://github.com/rwth-i6/returnn/issues/1326
+
  [build-system]
  requires = ["setuptools", "numpy"]

returnn/datasets/basic.py
@@ -20,7 +20,7 @@ import math
  import numpy
  import functools
  import typing
- from typing import TYPE_CHECKING, Optional, Any, Union, Type, Dict, Sequence, List, Callable
+ from typing import TYPE_CHECKING, Optional, Any, Set, Tuple, Union, Type, Dict, Sequence, List, Callable

  from returnn.log import log
  from returnn.engine.batch import Batch, BatchSetGenerator
@@ -141,12 +141,10 @@ class Dataset:
  :param int _shard_index: local shard index, when sharding is enabled
  """
  self.name = name or ("dataset_id%s" % id(self))
- self.lock = None # type: Optional[RLock] # Used when manipulating our data potentially from multiple threads.
- self.rnd_seq_drop = None # type: typing.Optional[Random]
+ self.lock: Optional[RLock] = None # Used when manipulating our data potentially from multiple threads.
+ self.rnd_seq_drop: Optional[Random] = None
  self.num_inputs = 0 # usually not used, but num_outputs instead, which is more generic
- self.num_outputs = (
- None
- ) # type: typing.Optional[typing.Dict[str,typing.Tuple[int,int]]] # tuple is num-classes, len(shape). # nopep8
+ self.num_outputs: Optional[Dict[str, Tuple[int, int]]] = None # tuple is num-classes, len(shape).
  self.window = window
  self.seq_ordering = seq_ordering # "default", "sorted" or "random". See self.get_seq_order_for_epoch().
  self.fixed_random_seed = fixed_random_seed
@@ -159,10 +157,10 @@ class Dataset:
  self._seq_order_seq_lens_file = seq_order_seq_lens_file
  self._seq_order_seq_lens_by_idx = None
  # There is probably no use case for combining the two, so avoid potential misconfiguration.
- assert (
- self.partition_epoch == 1 or self.repeat_epoch == 1
- ), "Combining partition_epoch and repeat_epoch is prohibited."
- self.labels = {} # type: typing.Dict[str,typing.List[str]]
+ assert self.partition_epoch == 1 or self.repeat_epoch == 1, (
+ "Combining partition_epoch and repeat_epoch is prohibited."
+ )
+ self.labels: Dict[str, List[str]] = {}
  self.weights = {}
  self._num_timesteps = 0
  self._num_seqs = 0
@@ -213,8 +211,8 @@ class Dataset:
  getattr(self, "epoch", "<unknown>"),
  )

- _getnewargs_exclude_attrs = set() # type: typing.Set[str]
- _getnewargs_remap = {} # type: typing.Dict[str,str]
+ _getnewargs_exclude_attrs: Set[str] = set()
+ _getnewargs_remap: Dict[str, str] = {}

  @staticmethod
  def _create_from_reduce(cls, kwargs, state) -> Dataset:
@@ -660,12 +658,13 @@ class Dataset:
  )
  old_seq_index = seq_index
  seq_index = [i for i in seq_index if all_seq_tags[i] in self.seq_tags_filter]
- assert (
- seq_index
- ), "%s: empty after applying seq_list_filter_file. Example filter tags: %r, used tags: %r" % (
- self,
- sorted(self.seq_tags_filter)[:3],
- [all_seq_tags[i] for i in old_seq_index[:3]],
+ assert seq_index, (
+ "%s: empty after applying seq_list_filter_file. Example filter tags: %r, used tags: %r"
+ % (
+ self,
+ sorted(self.seq_tags_filter)[:3],
+ [all_seq_tags[i] for i in old_seq_index[:3]],
+ )
  )
  return seq_index

@@ -736,9 +735,9 @@ class Dataset:
  """
  self.epoch = epoch
  self.rnd_seq_drop = Random(self._get_random_seed_for_epoch(epoch=epoch))
- assert (
- self._num_shards == 1 or self.supports_sharding()
- ), f"{self}: does not support sharding, but got num_shards == {self._num_shards}"
+ assert self._num_shards == 1 or self.supports_sharding(), (
+ f"{self}: does not support sharding, but got num_shards == {self._num_shards}"
+ )
  return False

  def finish_epoch(self, *, free_resources: bool = False):
@@ -970,16 +969,16 @@ class Dataset:
  except Exception: # also not always available
  num_seqs = None # ignore

- if math.isinf(num_seqs):
+ if num_seqs is not None and math.isinf(num_seqs):
  if allow_only_lr_suitable:
  # cannot compute meaningful complete_frac for infinite num_seqs
  return None
  else:
  num_seqs = None

- assert (
- num_seqs is None or 0 <= sorted_seq_idx < num_seqs
- ), f"{self}: invalid seq indices: 0 <= seq_idx ({sorted_seq_idx}) < num_seqs ({num_seqs}) violated"
+ assert num_seqs is None or 0 <= sorted_seq_idx < num_seqs, (
+ f"{self}: invalid seq indices: 0 <= seq_idx ({sorted_seq_idx}) < num_seqs ({num_seqs}) violated"
+ )
  return self.generic_complete_frac(sorted_seq_idx, num_seqs)

  @property

returnn/datasets/cached.py
@@ -46,9 +46,10 @@ class CachedDataset(Dataset):
  self._index_map = range(len(self._seq_index)) # sorted seq idx -> seq_index idx
  self._tag_idx = {} # type: typing.Dict[str,int] # map of tag -> real-seq-idx. call _update_tag_idx
  self.targets = {}
- self.target_keys = (
- []
- ) # the keys for which we provide data; we may have labels for additional keys in self.labels
+ # the keys for which we provide data;
+ # we may have labels for additional keys in self.labels
+ self.target_keys = []
+
  self.timestamps = None

  def initialize(self):
@@ -451,8 +451,7 @@ class DistributeFilesDataset(CachedDataset2):
  # We need to decide where to add this file, to the current or the next sub epoch.
  if not files_per_bin[bin_idx] or (
  # Better to add this file to the current sub epoch?
- abs((size_taken + size) - avg_size_per_sub_epoch)
- <= abs(size_taken - avg_size_per_sub_epoch)
+ abs((size_taken + size) - avg_size_per_sub_epoch) <= abs(size_taken - avg_size_per_sub_epoch)
  ):
  files_per_bin[bin_idx].append(f_tree)
  size_taken = 0
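The comments in this hunk describe a greedy packing rule: a file stays in the current sub-epoch bin whenever adding it keeps the bin's total size at least as close to the average size per sub epoch as leaving it out would; otherwise the next bin is opened. A self-contained sketch of that heuristic (names and helper are illustrative, not the RETURNN API):

    from typing import List, Tuple

    def distribute_files(files: List[Tuple[str, int]], num_bins: int) -> List[List[str]]:
        """Greedily pack (name, size) pairs into num_bins bins of roughly equal total size."""
        avg_size_per_bin = sum(size for _, size in files) / num_bins
        bins: List[List[str]] = [[] for _ in range(num_bins)]
        bin_idx, size_taken = 0, 0
        for name, size in files:
            # keep filling the current bin while that keeps it closer to the average
            if not bins[bin_idx] or abs((size_taken + size) - avg_size_per_bin) <= abs(size_taken - avg_size_per_bin):
                bins[bin_idx].append(name)
                size_taken += size
            else:
                bin_idx = min(bin_idx + 1, num_bins - 1)  # open the next bin
                bins[bin_idx].append(name)
                size_taken = size
        return bins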
@@ -46,12 +46,12 @@ class GeneratingDataset(Dataset):
  output_dim["data"] = (input_dim * self.window, 2) # not sparse
  self.num_outputs = output_dim
  self.expected_load_seq_start = 0
- self._seq_order = None # type: Optional[Sequence[int]]
+ self._seq_order: Optional[Sequence[int]] = None
  self._num_seqs = num_seqs
  self._total_num_seqs = num_seqs
  self.random = numpy.random.RandomState(1)
  self.reached_final_seq = False
- self.added_data = [] # type: typing.List[DatasetSeq]
+ self.added_data: List[DatasetSeq] = []
  if self.seq_ordering in ("sorted", "sorted_reverse"):
  # For the dev/eval dataset, RETURNN automatically tries to sort them.
  # As this is not supported, just ignore it and reset it to the default order.
@@ -904,22 +904,24 @@ class DummyDatasetMultipleDataKeys(DummyDataset):
  seq_len = {}
  for key in self.data_keys:
  seq_len[key] = _seq_len
- assert set(data_keys) == set(
- seq_len.keys()
- ), "%s: the keys of seq_len (%s) must match the keys in data_keys=%s." % (
- self,
- str(seq_len.keys()),
- str(data_keys),
+ assert set(data_keys) == set(seq_len.keys()), (
+ "%s: the keys of seq_len (%s) must match the keys in data_keys=%s."
+ % (
+ self,
+ str(seq_len.keys()),
+ str(data_keys),
+ )
+ )
+ assert isinstance(output_dim, dict), (
+ "%s: output_dim %r must be a dict containing a definition for each key in data_keys." % (self, output_dim)
  )
- assert isinstance(
- output_dim, dict
- ), "%s: output_dim %r must be a dict containing a definition for each key in data_keys." % (self, output_dim)
- assert set(data_keys) == set(
- output_dim.keys()
- ), "%s: the keys of output_dim (%s) must match the keys in data_keys=%s." % (
- self,
- str(output_dim.keys()),
- str(data_keys),
+ assert set(data_keys) == set(output_dim.keys()), (
+ "%s: the keys of output_dim (%s) must match the keys in data_keys=%s."
+ % (
+ self,
+ str(output_dim.keys()),
+ str(data_keys),
+ )
  )
 
  super(DummyDatasetMultipleDataKeys, self).__init__(
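These asserts only require that the seq_len and output_dim dicts are keyed exactly by data_keys. A consistent toy configuration might look like this (values are purely illustrative):

    data_keys = ["data", "classes"]
    seq_len = {"data": 20, "classes": 20}              # one length per data key
    output_dim = {"data": [9, 2], "classes": [3, 1]}   # one definition per data key
    assert set(data_keys) == set(seq_len.keys())
    assert isinstance(output_dim, dict) and set(data_keys) == set(output_dim.keys())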
@@ -2134,9 +2136,7 @@ class LibriSpeechCorpus(CachedDataset2):
  import os
  import zipfile
 
- transs = (
- {}
- ) # type: typing.Dict[typing.Tuple[str,int,int,int],str] # (subdir, speaker-id, chapter-id, seq-id) -> transcription # nopep8
+ transs: Dict[Tuple[str, int, int, int], str] = {} # (subdir, speaker-id, chapter-id, seq-id) -> transcription
  if self.use_zip:
  for name, zip_file in self._zip_files.items():
  assert isinstance(zip_file, zipfile.ZipFile)
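The annotated dict maps a (subdir, speaker-id, chapter-id, seq-id) tuple to the transcription text. For orientation, LibriSpeech *.trans.txt files contain lines such as "84-121123-0000 GO DO YOU HEAR", so a simplified parser filling such a dict could look like the following sketch (not the RETURNN implementation):

    from typing import Dict, List, Tuple

    def parse_trans_lines(subdir: str, lines: List[str]) -> Dict[Tuple[str, int, int, int], str]:
        transs = {}
        for line in lines:
            seq_name, txt = line.strip().split(" ", 1)  # e.g. "84-121123-0000 GO DO YOU HEAR"
            speaker_id, chapter_id, seq_id = map(int, seq_name.split("-"))
            transs[(subdir, speaker_id, chapter_id, seq_id)] = txt
        return transs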
@@ -37,9 +37,9 @@ class HDFDataset(CachedDataset):
  :param bool use_cache_manager: uses :func:`Util.cf` for files
  """
  super(HDFDataset, self).__init__(**kwargs)
- assert (
- self.partition_epoch == 1 or self.cache_byte_size_total_limit == 0
- ), "To use partition_epoch in HDFDatasets, disable caching by setting cache_byte_size=0"
+ assert self.partition_epoch == 1 or self.cache_byte_size_total_limit == 0, (
+ "To use partition_epoch in HDFDatasets, disable caching by setting cache_byte_size=0"
+ )
  self._use_cache_manager = use_cache_manager
  self.files = [] # type: typing.List[str] # file names
  self.h5_files = [] # type: typing.List[h5py.File]
@@ -1246,9 +1246,9 @@ class SimpleHDFWriter:
  self._datasets[name].resize(old_shape[0] + raw_data.shape[0], axis=0)
  expected_shape = (raw_data.shape[0],) + old_shape[1:]
  # append raw data to dataset
- assert (
- expected_shape == raw_data.shape
- ), f"{self} insert: shape mismatch: expected {expected_shape}, got {raw_data.shape}"
+ assert expected_shape == raw_data.shape, (
+ f"{self} insert: shape mismatch: expected {expected_shape}, got {raw_data.shape}"
+ )
  self._datasets[name][self._file.attrs["numTimesteps"] :] = raw_data
  self._file.attrs["numTimesteps"] += raw_data.shape[0]
  self._file.attrs["numSeqs"] += 1
@@ -1302,9 +1302,9 @@ class SimpleHDFWriter:
 
  offset = self._extra_num_time_steps[data_key] - raw_data.shape[0]
  expected_shape = (raw_data.shape[0],) + hdf_data.shape[1:]
- assert (
- expected_shape == raw_data.shape
- ), f"{self} insert other {data_key!r}: shape mismatch: expected {expected_shape}, got {raw_data.shape}"
+ assert expected_shape == raw_data.shape, (
+ f"{self} insert other {data_key!r}: shape mismatch: expected {expected_shape}, got {raw_data.shape}"
+ )
  hdf_data[offset:] = raw_data
 
  def insert_batch(self, inputs, seq_len, seq_tag, extra=None):
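Both asserts guard the same pattern: grow a resizable HDF5 dataset along its first (time) axis and append, after verifying that the trailing dimensions of the incoming array match. A minimal standalone sketch of that pattern using plain h5py (not the SimpleHDFWriter API):

    import h5py
    import numpy

    def append_time_major(path: str, name: str, raw_data: numpy.ndarray):
        """Append raw_data of shape (time, ...) to a growable dataset, checking shapes first."""
        with h5py.File(path, "a") as f:
            if name not in f:
                f.create_dataset(
                    name, shape=(0,) + raw_data.shape[1:], maxshape=(None,) + raw_data.shape[1:], dtype=raw_data.dtype
                )
            dset = f[name]
            old_len = dset.shape[0]
            expected_shape = (raw_data.shape[0],) + dset.shape[1:]
            assert expected_shape == raw_data.shape, f"shape mismatch: expected {expected_shape}, got {raw_data.shape}"
            dset.resize(old_len + raw_data.shape[0], axis=0)  # grow along the time axis
            dset[old_len:] = raw_data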
@@ -7,7 +7,22 @@ and some related helpers.
 
  from __future__ import annotations
 
- from typing import Optional, Union, Any, Callable, Iterator, List, Tuple, Set, BinaryIO, Dict, cast, Generator
+ from typing import (
+ Iterable,
+ Optional,
+ Sequence,
+ Union,
+ Any,
+ Callable,
+ Iterator,
+ List,
+ Tuple,
+ Set,
+ BinaryIO,
+ Dict,
+ cast,
+ Generator,
+ )
  import typing
  import os
  from io import IOBase
@@ -1472,8 +1487,8 @@ class TranslationDataset(CachedDataset2):
  }
 
  self._data_keys = self._source_data_keys + self._target_data_keys
- self._data = {data_key: [] for data_key in self._data_keys} # type: typing.Dict[str,typing.List[numpy.ndarray]]
- self._data_len = None # type: typing.Optional[int]
+ self._data: Dict[str, List[numpy.ndarray]] = {data_key: [] for data_key in self._data_keys}
+ self._data_len: Optional[int] = None
 
  self._vocabs = self._get_vocabs()
  self.num_outputs = {k: [max(self._vocabs[k].values()) + 1, 1] for k in self._vocabs.keys()} # all sparse
@@ -1489,7 +1504,7 @@ class TranslationDataset(CachedDataset2):
  unknown_label.setdefault(data_key, None)
  self._unknown_label = unknown_label
 
- self._seq_order = None # type: typing.Optional[typing.Sequence[int]] # seq_idx -> line_nr
+ self._seq_order: Optional[Sequence[int]] = None # seq_idx -> line_nr
  self._tag_prefix = "line-" # sequence tag is "line-n", where n is the line number
  self._thread = Thread(name="%r reader" % self, target=self._thread_main)
  self._thread.daemon = True
@@ -1878,14 +1893,11 @@ class TranslationFactorsDataset(TranslationDataset):
  assert file_prefix == self.target_file_prefix
  data_keys = self._target_data_keys
 
- data = [
+ data: List[List[numpy.ndarray]] = [
  self._factored_words_to_numpy(data_keys, s.decode("utf8").strip().split(), self._add_postfix[file_prefix])
  for s in data_strs
- ] # type: typing.List[typing.List[numpy.ndarray]] # shape: (len(data_strs), len(data_keys))
-
- data = zip(
- *data
- ) # type: typing.Iterable[typing.Tuple[numpy.ndarray]] # shape: (len(data_keys), len(data_strs))
+ ] # shape: (len(data_strs), len(data_keys))
+ data: Iterable[Tuple[numpy.ndarray]] = zip(*data) # shape: (len(data_keys), len(data_strs))
 
  with self._lock:
  for i, data_ in enumerate(data):
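The shape comments describe a plain transpose: a list with one inner list per sentence (one array per factor) becomes one tuple per factor (one array per sentence). A tiny self-contained illustration with made-up data:

    import numpy

    per_sentence = [
        [numpy.array([1, 2]), numpy.array([7, 8])],  # sentence 0: factor 0, factor 1
        [numpy.array([3]), numpy.array([9])],        # sentence 1: factor 0, factor 1
    ]
    per_factor = list(zip(*per_sentence))  # shape: (len(data_keys), len(data_strs))
    assert len(per_factor) == 2 and len(per_factor[0]) == 2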
@@ -1908,9 +1920,9 @@ class TranslationFactorsDataset(TranslationDataset):
  words_per_factor = [[]] * len(data_keys)
  elif len(data_keys) > 1:
  factored_words = [word.split(self._factor_separator) for word in words]
- assert all(
- len(factors) == len(data_keys) for factors in factored_words
- ), "All words must have all factors. Expected: " + self._factor_separator.join(data_keys)
+ assert all(len(factors) == len(data_keys) for factors in factored_words), (
+ "All words must have all factors. Expected: " + self._factor_separator.join(data_keys)
+ )
  words_per_factor = zip(*factored_words)
  words_per_factor = [list(w) for w in words_per_factor]
  else:
@@ -247,10 +247,10 @@ class MetaDataset(CachedDataset2):
  self.seq_order_control_dataset = seq_order_control_dataset
 
  # This will only initialize datasets needed for features occuring in data_map
- self.datasets = {
+ self.datasets: Dict[str, Dataset] = {
  key: init_dataset(datasets[key], extra_kwargs={"name": "%s_%s" % (self.name, key)}, parent_dataset=self)
  for key in self.dataset_keys
- } # type: typing.Dict[str,Dataset]
+ }
 
  self._seq_list_file = seq_list_file
  self.seq_list_original = self._load_seq_list(seq_list_file)
@@ -260,8 +260,8 @@ class MetaDataset(CachedDataset2):
 
  self.tag_idx = {tag: idx for (idx, tag) in enumerate(self.seq_list_original[self.default_dataset_key])}
 
- self._seq_lens = None # type: typing.Optional[typing.Dict[str,NumbersDict]]
- self._num_timesteps = None # type: typing.Optional[NumbersDict]
+ self._seq_lens: Optional[Dict[str, NumbersDict]] = None
+ self._num_timesteps: Optional[NumbersDict] = None
  self._seq_lens_file = seq_lens_file
  if seq_lens_file:
  seq_lens = load_json(filename=seq_lens_file)
@@ -290,7 +290,7 @@ class MetaDataset(CachedDataset2):
  self.num_outputs = self.data_dims
 
  self.orig_seq_order_is_initialized = False
- self.seq_list_ordered = None # type: typing.Optional[typing.Dict[str,typing.List[str]]]
+ self.seq_list_ordered: Optional[Dict[str, List[str]]] = None
 
  def _load_seq_list(self, seq_list_file: Optional[Union[str, Dict[str, str]]] = None) -> Dict[str, List[str]]:
  """
@@ -771,7 +771,7 @@ class ConcatDataset(CachedDataset2):
  for ds in self.datasets[1:]:
  assert ds.num_inputs == self.num_inputs
  assert ds.num_outputs == self.num_outputs
- self.dataset_seq_idx_offsets = None # type: typing.Optional[typing.List[int]]
+ self.dataset_seq_idx_offsets: Optional[List[int]] = None
 
  def init_seq_order(self, epoch=None, seq_list=None, seq_order=None):
  """
@@ -1017,9 +1017,9 @@ class CombinedDataset(CachedDataset2):
  for (dset_key, dset_data_key), data_key in data_map.items()
  }
 
- self.dataset_seq_idx_boundaries = None # type: typing.Optional[typing.List[int]]
- self.dataset_sorted_seq_idx_list = None # type: typing.Optional[typing.List[typing.Tuple[int,int]]]
- self.used_num_seqs_per_subset = None # type: typing.Optional[typing.List[int]]
+ self.dataset_seq_idx_boundaries: Optional[List[int]] = None
+ self.dataset_sorted_seq_idx_list: Optional[List[Tuple[int, int]]] = None
+ self.used_num_seqs_per_subset: Optional[List[int]] = None
 
  def init_seq_order(self, epoch=None, seq_list=None, seq_order=None):
  """
@@ -1180,9 +1180,9 @@ class CombinedDataset(CachedDataset2):
  :rtype: list[int]
  """
  assert self.partition_epoch in [None, 1], "partition_epoch not supported in combination with sampling_sizes."
- assert (
- self._seq_order_seq_lens_file is None
- ), "seq_order_seq_lens_file not supported in combination with sampling_sizes."
+ assert self._seq_order_seq_lens_file is None, (
+ "seq_order_seq_lens_file not supported in combination with sampling_sizes."
+ )
  assert not self.unique_seq_tags, "unique_seq_tags not supported in combination with sampling_sizes."
  assert self.seq_tags_filter is None, "seq_order_seq_lens_file in combination with sampling_sizes."
 
@@ -1445,7 +1445,7 @@ class ConcatSeqsDataset(CachedDataset2):
  self.repeat_in_between_last_frame_up_to_multiple_of = repeat_in_between_last_frame_up_to_multiple_of or {}
  self.pad_narrow_data_to_multiple_of_target_len = pad_narrow_data_to_multiple_of_target_len or {}
  if epoch_wise_filter is None:
- self.epoch_wise_filter = None # type: Optional[EpochWiseFilter]
+ self.epoch_wise_filter: Optional[EpochWiseFilter] = None
  elif isinstance(epoch_wise_filter, dict):
  self.epoch_wise_filter = EpochWiseFilter(epoch_wise_filter)
  else:
@@ -1471,10 +1471,8 @@ class ConcatSeqsDataset(CachedDataset2):
  self.seq_lens = eval(open(seq_len_file).read())
  assert isinstance(self.seq_lens, dict)
  self.full_seq_len_list = self._get_full_seq_lens_list()
- self.cur_seq_list = None # type: typing.Optional[typing.List[str]] # list of seq tags
- self.cur_sub_seq_idxs = (
- None
- ) # type: typing.Optional[typing.List[typing.List[int]]] # list of list of sub seq idxs
+ self.cur_seq_list: typing.Optional[typing.List[str]] = None # list of seq tags
+ self.cur_sub_seq_idxs: typing.Optional[typing.List[typing.List[int]]] = None # list of list of sub seq idxs
 
  def _get_full_seq_lens_list(self):
  """
@@ -1564,20 +1562,22 @@ class ConcatSeqsDataset(CachedDataset2):
  if seq_idx == 0: # some extra check, but enough to do for first seq only
  sub_dataset_keys = self.dataset.get_data_keys()
  for key in self.remove_in_between_postfix:
- assert (
- key in sub_dataset_keys
- ), "%s: remove_in_between_postfix key %r not in sub dataset data-keys %r" % (
- self,
- key,
- sub_dataset_keys,
+ assert key in sub_dataset_keys, (
+ "%s: remove_in_between_postfix key %r not in sub dataset data-keys %r"
+ % (
+ self,
+ key,
+ sub_dataset_keys,
+ )
  )
  for key in self.repeat_in_between_last_frame_up_to_multiple_of:
- assert (
- key in sub_dataset_keys
- ), "%s: repeat_in_between_last_frame_up_to_multiple_of key %r not in sub dataset data-keys %r" % (
- self,
- key,
- sub_dataset_keys,
+ assert key in sub_dataset_keys, (
+ "%s: repeat_in_between_last_frame_up_to_multiple_of key %r not in sub dataset data-keys %r"
+ % (
+ self,
+ key,
+ sub_dataset_keys,
+ )
  )
  for key in self.pad_narrow_data_to_multiple_of_target_len:
  assert key in sub_dataset_keys, (
@@ -1587,15 +1587,16 @@ class ConcatSeqsDataset(CachedDataset2):
  for sub_seq_idx, sub_seq_tag in zip(sub_seq_idxs, sub_seq_tags):
  self.dataset.load_seqs(sub_seq_idx, sub_seq_idx + 1)
  sub_dataset_tag = self.dataset.get_tag(sub_seq_idx)
- assert (
- sub_dataset_tag == sub_seq_tag
- ), "%s: expected tag %r for sub seq idx %i but got %r, part of seq %i %r" % (
- self,
- sub_seq_tag,
- sub_seq_idx,
- sub_dataset_tag,
- seq_idx,
- seq_tag,
+ assert sub_dataset_tag == sub_seq_tag, (
+ "%s: expected tag %r for sub seq idx %i but got %r, part of seq %i %r"
+ % (
+ self,
+ sub_seq_tag,
+ sub_seq_idx,
+ sub_dataset_tag,
+ seq_idx,
+ seq_tag,
+ )
  )
  for key in self.get_data_keys():
  data = self.dataset.get_data(sub_seq_idx, key)
@@ -169,7 +169,7 @@ class NormalizationData:
  sumErr = np.sum(np.abs(newSum - oldSum - intermediateSum))
  if sumErr > NormalizationData.SUMMATION_PRECISION:
  raise FloatingPointError(
- "sums have very different orders of magnitude." " summation error = {}".format(sumErr)
+ "sums have very different orders of magnitude. summation error = {}".format(sumErr)
  )
  return newSum
 
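Besides joining the accidentally split message string, this hunk touches a check for floating-point cancellation: when the accumulated sum and a new partial sum differ by many orders of magnitude, the low-order bits of the smaller term are lost, so newSum - oldSum no longer reproduces the intermediate sum. A small scalar illustration of the effect (the 0.5 threshold is made up and only stands in for SUMMATION_PRECISION):

    import numpy as np

    old_sum = np.float32(1e8)
    intermediate = np.float32(1.0)
    new_sum = old_sum + intermediate          # 1.0 is below float32 resolution near 1e8
    sum_err = abs(new_sum - old_sum - intermediate)
    assert sum_err > 0.5                      # the added 1.0 effectively disappeared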
@@ -318,9 +318,9 @@ class PostprocessingDataset(CachedDataset2):
  data_iter = self._iterate_dataset()
  if self._map_seq_stream is not None:
  data_iter = self._map_seq_stream(data_iter, epoch=self.epoch, rng=self._rng, **util.get_fwd_compat_kwargs())
- assert isinstance(
- data_iter, Iterator
- ), f"map_seq_stream must produce an {Iterator.__name__}, but produced {type(data_iter).__name__}"
+ assert isinstance(data_iter, Iterator), (
+ f"map_seq_stream must produce an {Iterator.__name__}, but produced {type(data_iter).__name__}"
+ )
  return _validate_tensor_dict_iter(data_iter)
 
  def _iterate_dataset(self) -> Iterator[TensorDict]:
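The contract checked here is that a user-supplied map_seq_stream callback receives an iterator of TensorDicts and must itself return an iterator (typically a generator), so that it may merge, drop, or insert sequences. A minimal sketch of such a callback (signature simplified, filtering criterion made up):

    def my_map_seq_stream(tensor_dicts, **kwargs):
        """Example stream-level postprocessing: drop every second sequence."""
        for idx, tensor_dict in enumerate(tensor_dicts):
            if idx % 2 == 0:
                yield tensor_dict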
@@ -349,9 +349,9 @@ class PostprocessingDataset(CachedDataset2):
  tensor_dict = self._map_seq(
  tensor_dict, epoch=self.epoch, seq_idx=seq_index, rng=self._rng, **util.get_fwd_compat_kwargs()
  )
- assert isinstance(
- tensor_dict, TensorDict
- ), f"map_seq must produce a {TensorDict.__name__}, but produced {type(tensor_dict).__name__}"
+ assert isinstance(tensor_dict, TensorDict), (
+ f"map_seq must produce a {TensorDict.__name__}, but produced {type(tensor_dict).__name__}"
+ )
 
  # Re-adding the seq_tag/complete_frac here causes no harm in case they are dropped
  # since we don't add/drop any segments w/ the non-iterator postprocessing function.
@@ -367,9 +367,9 @@ class PostprocessingDataset(CachedDataset2):
  if self._seq_list_for_validation is not None:
  seq_tag = self._seq_list_for_validation[seq_index]
  tag_of_seq = tensor_dict.data["seq_tag"].raw_tensor.item()
- assert (
- tag_of_seq == seq_tag
- ), f"seq tag mismath: {tag_of_seq} != {seq_tag} for seq index {seq_index} when seq list is given"
+ assert tag_of_seq == seq_tag, (
+ f"seq tag mismath: {tag_of_seq} != {seq_tag} for seq index {seq_index} when seq list is given"
+ )
 
  yield tensor_dict
  seq_index += 1
@@ -393,13 +393,14 @@ class SprintDatasetBase(Dataset):
  targets = {"classes": targets}
  if "classes" in targets:
  # 'classes' is always the alignment
- assert targets["classes"].shape == (
- reduce_num_frames,
- ), "Number of targets %s does not match number of features %s (reduce factor %d)" % (
- # is in format (time,)
- targets["classes"].shape,
- (num_frames,),
- self.reduce_target_factor,
+ assert targets["classes"].shape == (reduce_num_frames,), (
+ "Number of targets %s does not match number of features %s (reduce factor %d)"
+ % (
+ # is in format (time,)
+ targets["classes"].shape,
+ (num_frames,),
+ self.reduce_target_factor,
+ )
  )
  if "speaker_name" in targets:
  targets["speaker_name"] = targets["speaker_name"].strip()
@@ -2,7 +2,6 @@
  Operations on strings.
  """
 
-
  from __future__ import annotations
  import numpy