returnn 1.20230718.124003__tar.gz → 1.20230718.183712__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (404)
  1. {returnn-1.20230718.124003/returnn.egg-info → returnn-1.20230718.183712}/PKG-INFO +1 -1
  2. returnn-1.20230718.183712/_setup_info_generated.py +2 -0
  3. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/_backend.py +2 -0
  4. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/_utils.py +5 -2
  5. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/array_.py +9 -1
  6. returnn-1.20230718.183712/returnn/frontend/device.py +56 -0
  7. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/_dim_extra.py +35 -6
  8. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/_tensor_extra.py +1 -2
  9. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/tensor_dict.py +1 -0
  10. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/_backend.py +1 -0
  11. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_low_level/_backend.py +2 -0
  12. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/frontend/_backend.py +19 -5
  13. {returnn-1.20230718.124003 → returnn-1.20230718.183712/returnn.egg-info}/PKG-INFO +1 -1
  14. returnn-1.20230718.124003/_setup_info_generated.py +0 -2
  15. returnn-1.20230718.124003/returnn/frontend/device.py +0 -26
  16. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/.editorconfig +0 -0
  17. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/.gitignore +0 -0
  18. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/.gitmodules +0 -0
  19. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/.kateconfig +0 -0
  20. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/CHANGELOG.md +0 -0
  21. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/CODEOWNERS +0 -0
  22. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/CONTRIBUTING.md +0 -0
  23. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/LICENSE +0 -0
  24. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/MANIFEST.in +0 -0
  25. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/README.rst +0 -0
  26. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/__init__.py +0 -0
  27. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/12AX.cluster_map +0 -0
  28. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/_setup_returnn_env.py +0 -0
  29. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-fwd.config +0 -0
  30. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-horovod-mpi.py +0 -0
  31. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-horovod-mpi.py.sh +0 -0
  32. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-horovod-mpi.sh +0 -0
  33. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-hyper-param-tuning.config +0 -0
  34. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-iter-dataset.py +0 -0
  35. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-list-devices.py +0 -0
  36. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-lua-torch-layer.config +0 -0
  37. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-pretrain.config +0 -0
  38. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-record-and-push-to-webserver.py +0 -0
  39. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-returnn-as-framework.py +0 -0
  40. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-rf.config +0 -0
  41. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-rhn-enwik8.config +0 -0
  42. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-sprint-interface.py +0 -0
  43. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-att-copy.config +0 -0
  44. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-attention.config +0 -0
  45. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-chunking-blstm.12ax.config +0 -0
  46. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-contribrnn-lstm.12ax.config +0 -0
  47. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-enc-dec.config +0 -0
  48. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-hard-att-copy.config +0 -0
  49. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-lstm-benchmark.py +0 -0
  50. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-maxgradnorm-lstm.12ax.config +0 -0
  51. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-native-lstm-lowmem.12ax.config +0 -0
  52. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-native-lstm.12ax.config +0 -0
  53. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-native-lstm2.12ax.config +0 -0
  54. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-native-lstm2.12ax.tuned.config +0 -0
  55. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-neural-transducer.12ax.config +0 -0
  56. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-rec-explicit-lstm.config +0 -0
  57. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-rec-explicit-rnn.config +0 -0
  58. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-rec-self-att.config +0 -0
  59. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-search-compiled-graph.py +0 -0
  60. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-tf-vanilla-lstm.12ax.config +0 -0
  61. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-timit-lstm-ctc.config +0 -0
  62. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-torch.config +0 -0
  63. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo-upd-mult-model.lstm.12ax.config +0 -0
  64. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/demo.sh +0 -0
  65. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/IAM_lines/a01-000u-00.png +0 -0
  66. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/IAM_lines/a01-007-04.png +0 -0
  67. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/IAM_lines/a01-007-06.png +0 -0
  68. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/README.txt +0 -0
  69. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/chars.txt +0 -0
  70. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/config_demo +0 -0
  71. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/config_fwd +0 -0
  72. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/config_real +0 -0
  73. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/create_IAM_dataset.py +0 -0
  74. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/decode.py +0 -0
  75. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/features/raw/demo.h5 +0 -0
  76. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/go.sh +0 -0
  77. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/lines.txt +0 -0
  78. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/split/eval.txt +0 -0
  79. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/split/train.txt +0 -0
  80. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/IAM/split/valid.txt +0 -0
  81. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/README.md +0 -0
  82. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/artificial/create_test_h5.py +0 -0
  83. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/artificial/forwardconfig +0 -0
  84. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/artificial/go.sh +0 -0
  85. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/artificial/trainconfig +0 -0
  86. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/artificial_rgb/create_test_h5.py +0 -0
  87. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/artificial_rgb/forwardconfig +0 -0
  88. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/artificial_rgb/go.sh +0 -0
  89. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/demos/mdlstm/artificial_rgb/trainconfig +0 -0
  90. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/pyproject.toml +0 -0
  91. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/requirements.txt +0 -0
  92. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/__init__.py +0 -0
  93. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/__main__.py +0 -0
  94. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/__old_mod_loader__.py +0 -0
  95. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/__setup__.py +0 -0
  96. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/config.py +0 -0
  97. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/__init__.py +0 -0
  98. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/audio.py +0 -0
  99. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/basic.py +0 -0
  100. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/bundle_file.py +0 -0
  101. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/cached.py +0 -0
  102. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/cached2.py +0 -0
  103. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/generating.py +0 -0
  104. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/hdf.py +0 -0
  105. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/lm.py +0 -0
  106. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/map.py +0 -0
  107. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/meta.py +0 -0
  108. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/multi_proc.py +0 -0
  109. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/normalization_data.py +0 -0
  110. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/numpy_dump.py +0 -0
  111. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/raw_wav.py +0 -0
  112. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/sprint.py +0 -0
  113. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/stereo.py +0 -0
  114. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/util/__init__.py +0 -0
  115. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/util/feature_extraction.py +0 -0
  116. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/datasets/util/vocabulary.py +0 -0
  117. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/engine/__init__.py +0 -0
  118. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/engine/base.py +0 -0
  119. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/engine/batch.py +0 -0
  120. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/__init__.py +0 -0
  121. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/__main__.py +0 -0
  122. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/.git +0 -0
  123. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/.gitignore +0 -0
  124. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/LICENSE +0 -0
  125. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/README.md +0 -0
  126. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/aligner.gif +0 -0
  127. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/check.png +0 -0
  128. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/core.cu +0 -0
  129. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/core.h +0 -0
  130. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/core_cpu.cpp +0 -0
  131. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/LICENSE +0 -0
  132. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/MANIFEST.in +0 -0
  133. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/README.md +0 -0
  134. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/binding.cpp +0 -0
  135. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.cu +0 -0
  136. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.h +0 -0
  137. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/requirements.txt +0 -0
  138. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/setup.py +0 -0
  139. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/__init__.py +0 -0
  140. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/test.py +0 -0
  141. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/ref_rna.py +0 -0
  142. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/setup.py +0 -0
  143. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op.cc +0 -0
  144. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op_kernel_tmpl.h +0 -0
  145. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/warp_rna/__init__.py +0 -0
  146. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/WarpRna/warp-rna/test.cpp +0 -0
  147. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/__init__.py +0 -0
  148. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/graph_editor/README.md +0 -0
  149. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/graph_editor/__init__.py +0 -0
  150. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/graph_editor/edit.py +0 -0
  151. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/graph_editor/reroute.py +0 -0
  152. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/graph_editor/select.py +0 -0
  153. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/graph_editor/subgraph.py +0 -0
  154. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/graph_editor/transform.py +0 -0
  155. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/extern/graph_editor/util.py +0 -0
  156. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/forward_iface.py +0 -0
  157. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/__init__.py +0 -0
  158. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/_numpy_backend.py +0 -0
  159. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/attention.py +0 -0
  160. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/cond.py +0 -0
  161. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/const.py +0 -0
  162. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/container.py +0 -0
  163. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/control_flow_ctx.py +0 -0
  164. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/conv.py +0 -0
  165. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/dims.py +0 -0
  166. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/dropout.py +0 -0
  167. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/dtype.py +0 -0
  168. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/encoder/__init__.py +0 -0
  169. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/encoder/base.py +0 -0
  170. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/encoder/conformer.py +0 -0
  171. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/gradient.py +0 -0
  172. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/init.py +0 -0
  173. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/linear.py +0 -0
  174. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/loop.py +0 -0
  175. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/loss.py +0 -0
  176. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/math_.py +0 -0
  177. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/matmul.py +0 -0
  178. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/module.py +0 -0
  179. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/normalization.py +0 -0
  180. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/parameter.py +0 -0
  181. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/rand.py +0 -0
  182. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/rec.py +0 -0
  183. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/reduce.py +0 -0
  184. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/run_ctx.py +0 -0
  185. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/signal.py +0 -0
  186. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/state.py +0 -0
  187. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/tensor_array.py +0 -0
  188. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/frontend/types.py +0 -0
  189. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/import_/__init__.py +0 -0
  190. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/import_/common.py +0 -0
  191. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/import_/git.py +0 -0
  192. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/import_/import_.py +0 -0
  193. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/learning_rate_control.py +0 -0
  194. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/log.py +0 -0
  195. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/native_op.cpp +0 -0
  196. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/native_op.py +0 -0
  197. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/pretrain.py +0 -0
  198. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/sprint/__init__.py +0 -0
  199. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/sprint/cache.py +0 -0
  200. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/sprint/control.py +0 -0
  201. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/sprint/error_signals.py +0 -0
  202. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/sprint/extern_interface.py +0 -0
  203. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/sprint/interface.py +0 -0
  204. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/README.md +0 -0
  205. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/__init__.py +0 -0
  206. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/_tensor_mixin_base.py +0 -0
  207. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/_tensor_op_overloads.py +0 -0
  208. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/control_flow_ctx.py +0 -0
  209. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/dim.py +0 -0
  210. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/marked_dim.py +0 -0
  211. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/tensor.py +0 -0
  212. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tensor/utils.py +0 -0
  213. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/__init__.py +0 -0
  214. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/compat.py +0 -0
  215. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/data_pipeline.py +0 -0
  216. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/distributed.py +0 -0
  217. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/engine.py +0 -0
  218. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/__init__.py +0 -0
  219. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/_utils.py +0 -0
  220. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/cond.py +0 -0
  221. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/config_entry_points.py +0 -0
  222. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/debug_eager_mode.py +0 -0
  223. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/dims.py +0 -0
  224. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/layer.py +0 -0
  225. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/make_layer.py +0 -0
  226. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/parameter_assign.py +0 -0
  227. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_layers/prev_tensor_ref.py +0 -0
  228. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/frontend_low_level/__init__.py +0 -0
  229. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/horovod.py +0 -0
  230. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/hyper_param_tuning.py +0 -0
  231. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/layers/__init__.py +0 -0
  232. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/layers/base.py +0 -0
  233. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/layers/basic.py +0 -0
  234. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/layers/rec.py +0 -0
  235. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/layers/segmental_model.py +0 -0
  236. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/layers/signal_processing.py +0 -0
  237. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/layers/variable.py +0 -0
  238. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/native_op.py +0 -0
  239. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/network.py +0 -0
  240. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/sprint.py +0 -0
  241. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/updater.py +0 -0
  242. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/util/__init__.py +0 -0
  243. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/util/basic.py +0 -0
  244. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/util/data.py +0 -0
  245. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/util/gradient_checkpoint.py +0 -0
  246. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/util/ken_lm.py +0 -0
  247. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/tf/util/open_fst.py +0 -0
  248. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/README.md +0 -0
  249. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/__init__.py +0 -0
  250. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/data/__init__.py +0 -0
  251. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/data/pipeline.py +0 -0
  252. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/data/returnn_dataset_wrapper.py +0 -0
  253. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/data/tensor_utils.py +0 -0
  254. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/distributed.py +0 -0
  255. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/engine.py +0 -0
  256. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/frontend/__init__.py +0 -0
  257. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/frontend/_rand.py +0 -0
  258. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/frontend/bridge.py +0 -0
  259. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/functional/README.md +0 -0
  260. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/functional/__init__.py +0 -0
  261. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/torch/updater.py +0 -0
  262. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/__init__.py +0 -0
  263. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/basic.py +0 -0
  264. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/better_exchook.py +0 -0
  265. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/bpe.py +0 -0
  266. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/debug.py +0 -0
  267. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/debug_helpers.py +0 -0
  268. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/fsa.py +0 -0
  269. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/literal_py_to_pickle.py +0 -0
  270. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/pprint.py +0 -0
  271. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/py-to-pickle.cpp +0 -0
  272. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/py_compat.py +0 -0
  273. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/sig_proc.py +0 -0
  274. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn/util/task_system.py +0 -0
  275. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn.egg-info/SOURCES.txt +0 -0
  276. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn.egg-info/dependency_links.txt +0 -0
  277. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/returnn.egg-info/top_level.txt +0 -0
  278. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/rnn.py +0 -0
  279. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/setup.cfg +0 -0
  280. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/setup.py +0 -0
  281. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/DummySprintExec.py +0 -0
  282. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm-inspection-profile.xml +0 -0
  283. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/.gitignore +0 -0
  284. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/.name +0 -0
  285. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/codeStyleSettings.xml +0 -0
  286. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/codeStyles/Project.xml +0 -0
  287. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/codeStyles/codeStyleConfig.xml +0 -0
  288. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/inspectionProfiles/Project_Default.xml +0 -0
  289. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/inspectionProfiles/profiles_settings.xml +0 -0
  290. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/misc.xml +0 -0
  291. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/modules.xml +0 -0
  292. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/returnn.iml +0 -0
  293. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/PyCharm.idea/scopes/scope_settings.xml +0 -0
  294. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/_set_num_threads1.py +0 -0
  295. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/_setup_returnn_env.py +0 -0
  296. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/_setup_test_env.py +0 -0
  297. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/bpe-unicode-demo.codes +0 -0
  298. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/bpe-unicode-demo.vocab +0 -0
  299. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/lexicon_opt.fst +0 -0
  300. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/lexicon_opt.isyms +0 -0
  301. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/lexicon_opt.jpg +0 -0
  302. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/lexicon_opt.osyms +0 -0
  303. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/lint_common.py +0 -0
  304. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/pycharm-inspect.py +0 -0
  305. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/pylint.py +0 -0
  306. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/returnn-as-framework.py +0 -0
  307. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/rf_utils.py +0 -0
  308. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/spelling.dic +0 -0
  309. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_Config.py +0 -0
  310. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_Dataset.py +0 -0
  311. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_Fsa.py +0 -0
  312. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_GeneratingDataset.py +0 -0
  313. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_HDFDataset.py +0 -0
  314. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_LearningRateControl.py +0 -0
  315. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_Log.py +0 -0
  316. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_MultiProcDataset.py +0 -0
  317. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_PTDataset.py +0 -0
  318. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_Pretrain.py +0 -0
  319. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_ResNet.py +0 -0
  320. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_SprintDataset.py +0 -0
  321. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_SprintInterface.py +0 -0
  322. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TFEngine.py +0 -0
  323. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TFNativeOp.py +0 -0
  324. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TFNetworkLayer.py +0 -0
  325. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TFNetworkRecLayer.py +0 -0
  326. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TFNetworkSigProcLayer.py +0 -0
  327. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TFUpdater.py +0 -0
  328. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TFUtil.py +0 -0
  329. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TF_determinism.py +0 -0
  330. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TaskSystem.py +0 -0
  331. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TaskSystem_SharedMem.py +0 -0
  332. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_TranslationDataset.py +0 -0
  333. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_Util.py +0 -0
  334. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_demos.py +0 -0
  335. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_fork_exec.py +0 -0
  336. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_hdf_dump.py +0 -0
  337. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_array.py +0 -0
  338. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_attention.py +0 -0
  339. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_base.py +0 -0
  340. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_cond.py +0 -0
  341. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_const.py +0 -0
  342. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_container.py +0 -0
  343. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_conv.py +0 -0
  344. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_encoder_conformer.py +0 -0
  345. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_loop.py +0 -0
  346. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_math.py +0 -0
  347. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_normalization.py +0 -0
  348. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_rec.py +0 -0
  349. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_reduce.py +0 -0
  350. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_rf_signal.py +0 -0
  351. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_tensor.py +0 -0
  352. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_tools.py +0 -0
  353. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_torch_engine.py +0 -0
  354. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_torch_frontend.py +0 -0
  355. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tests/test_torch_internal_frontend.py +0 -0
  356. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/_setup_returnn_env.py +0 -0
  357. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/analyze-dataset-batches.py +0 -0
  358. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/bliss-collect-seq-lens.py +0 -0
  359. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/bliss-dump-text.py +0 -0
  360. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/bliss-get-segment-names.py +0 -0
  361. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/bliss-to-ogg-zip.py +0 -0
  362. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/bpe-create-lexicon.py +0 -0
  363. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/calculate-word-error-rate.py +0 -0
  364. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/cleanup-old-models.py +0 -0
  365. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/collect-orth-symbols.py +0 -0
  366. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/collect-words.py +0 -0
  367. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/compile_native_op.py +0 -0
  368. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/compile_tf_graph.py +0 -0
  369. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/debug-dump-search-scores.py +0 -0
  370. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/debug-plot-search-scores.py +0 -0
  371. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/dump-dataset-raw-strings.py +0 -0
  372. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/dump-dataset.py +0 -0
  373. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/dump-forward-stats.py +0 -0
  374. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/dump-forward.py +0 -0
  375. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/dump-network-json.py +0 -0
  376. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/dump-pickle.py +0 -0
  377. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/extract_state_tying_from_dataset.py +0 -0
  378. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/get-attention-weights.py +0 -0
  379. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/get-best-model-epoch.py +0 -0
  380. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/hdf_dump.py +0 -0
  381. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/hdf_dump_translation_dataset.py +0 -0
  382. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/import-blocks-mt-model.py +0 -0
  383. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/import-t2t-mt-model.py +0 -0
  384. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/.gitignore +0 -0
  385. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/Makefile +0 -0
  386. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/README.md +0 -0
  387. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/example/README.md +0 -0
  388. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/example/libs_list +0 -0
  389. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.config +0 -0
  390. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.keep_over_epoch.lstm2.config +0 -0
  391. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/example/rescore_lattice.sh +0 -0
  392. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/example/state_vars_list +0 -0
  393. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/example/tensor_names_list +0 -0
  394. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/file.h +0 -0
  395. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/htklatticerescorer.cc +0 -0
  396. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/htklatticerescorer.h +0 -0
  397. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/main.cc +0 -0
  398. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/rescorer.h +0 -0
  399. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/vocabulary.cc +0 -0
  400. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/lattice_rescorer/vocabulary.h +0 -0
  401. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/tf_avg_checkpoints.py +0 -0
  402. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/tf_inspect_checkpoint.py +0 -0
  403. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/tf_inspect_summary_log.py +0 -0
  404. {returnn-1.20230718.124003 → returnn-1.20230718.183712}/tools/torch_export_to_onnx.py +0 -0

PKG-INFO (returnn.egg-info/PKG-INFO → PKG-INFO)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: returnn
-Version: 1.20230718.124003
+Version: 1.20230718.183712
 Summary: The RWTH extensible training framework for universal recurrent neural networks
 Home-page: https://github.com/rwth-i6/returnn/
 Author: Albert Zeyer

returnn-1.20230718.183712/_setup_info_generated.py (added)
@@ -0,0 +1,2 @@
+version = '1.20230718.183712'
+long_version = '1.20230718.183712+git.b77467a'

returnn/frontend/_backend.py
@@ -653,6 +653,7 @@ class Backend(Generic[T]):
         dims: Sequence[Dim],
         dtype: str,
         sparse_dim: Optional[Dim] = None,
+        device: Optional[str] = None,
         name: Optional[str] = None,
     ) -> Tensor[T]:
         """
@@ -660,6 +661,7 @@ class Backend(Generic[T]):
         :param dims:
         :param dtype:
         :param sparse_dim:
+        :param device:
         :param name:
         :return: tensor
         """

returnn/frontend/_utils.py
@@ -79,13 +79,16 @@ def bin_op_out_template(
     :return: out, a, b
     """
     src_dtype = None
+    src_device = None
     if isinstance(a, Tensor):
         src_dtype = a.dtype
+        src_device = a.device
     elif isinstance(b, Tensor):
         src_dtype = b.dtype
-    a = rf.convert_to_tensor(a, dtype=src_dtype, _backend=backend)
+        src_device = b.device
+    a = rf.convert_to_tensor(a, dtype=src_dtype, device=src_device, keep_scalar_on_cpu=allow_scalar, _backend=backend)
     src_dtype = src_dtype or a.dtype
-    b = rf.convert_to_tensor(b, dtype=src_dtype, _backend=backend)
+    b = rf.convert_to_tensor(b, dtype=src_dtype, device=src_device, keep_scalar_on_cpu=allow_scalar, _backend=backend)
     # sanity checks
     # noinspection PyProtectedMember
     assert a._raw_backend == b._raw_backend, "Cannot combine tensors from two different frontends, e.g. TF and PT"

returnn/frontend/array_.py
@@ -46,6 +46,8 @@ def convert_to_tensor(
     dtype: Optional[str] = None,
     sparse_dim: Optional[Dim] = None,
     shape: Sequence[Dim] = None,
+    device: Optional[str] = None,
+    keep_scalar_on_cpu: bool = False,
     name: Optional[str] = None,
     _backend: Optional[Type[Backend]] = None,
 ) -> Tensor[T]:
@@ -56,6 +58,8 @@ def convert_to_tensor(
     :param sparse_dim:
     :param shape: alias for dims, for some older code
     :param name:
+    :param device:
+    :param keep_scalar_on_cpu: if the value is already on the CPU, keep it there, even if `device` is sth else
     :param _backend:
     :return: tensor
     """
@@ -81,6 +85,8 @@ def convert_to_tensor(
             dtype = value.dtype.name
         else:
             raise ValueError(f"number {value} type {type(value)} needs explicit `dtype` specification")
+        if keep_scalar_on_cpu:
+            device = "cpu"
     elif isinstance(value, numpy.ndarray):
         if _backend is None:
             # Small exception: Do not use the NumpyBackend but the global backend in this case.
@@ -100,7 +106,9 @@ def convert_to_tensor(
         ]
     if dtype is None:
         dtype = value_backend.get_dtype_name_raw(value)
-    return _backend.convert_to_tensor(value=value, dims=dims, dtype=dtype, sparse_dim=sparse_dim, name=name)
+    return _backend.convert_to_tensor(
+        value=value, dims=dims, dtype=dtype, sparse_dim=sparse_dim, device=device, name=name
+    )
 
 
 constant = convert_to_tensor  # alias for some older code
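
Note (not part of the diff): a hedged sketch of how the extended rf.convert_to_tensor signature could be called after this change. The concrete values, dtypes and the "cpu" device string below are illustrative assumptions, not taken from the diff.

import returnn.frontend as rf

# Sketch only, assuming an eager backend (e.g. the PyTorch backend) is selected.
# A Python scalar that should stay on CPU even when a default device is active:
scale = rf.convert_to_tensor(0.1, dtype="float32", keep_scalar_on_cpu=True)
# Explicitly requesting a device for a small constant:
step = rf.convert_to_tensor(1, dtype="int32", device="cpu")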

returnn-1.20230718.183712/returnn/frontend/device.py (added)
@@ -0,0 +1,56 @@
+"""
+Device handling.
+"""
+
+
+from __future__ import annotations
+from typing import Optional
+from contextlib import contextmanager
+from returnn.tensor import Tensor
+
+
+__all__ = ["copy_to_device", "get_default_device", "set_default_device_ctx"]
+
+
+_default_device: Optional[str] = None
+
+
+def copy_to_device(x: Tensor, device: Optional[str] = None) -> Tensor:
+    """
+    Copy tensor to device.
+
+    :param x: tensor
+    :param device:
+    :return: tensor on device
+    """
+    if not device:
+        device = get_default_device()
+    if not device:
+        return x
+    if x.raw_tensor is None:
+        return x
+    if x.device == device:
+        return x
+    # noinspection PyProtectedMember
+    return x._raw_backend.copy_to_device(x, device)
+
+
+def get_default_device() -> Optional[str]:
+    """
+    :return: default device, where to put new tensors (via random number generators, constant, range_over_dim, etc)
+    """
+    return _default_device
+
+
+@contextmanager
+def set_default_device_ctx(device: Optional[str]):
+    """
+    :param device: see :func:`get_default_device`
+    """
+    global _default_device
+    old_device = _default_device
+    try:
+        _default_device = device
+        yield
+    finally:
+        _default_device = old_device
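
Note (not part of the diff): a minimal hedged sketch of the new default-device API introduced by this file; the "cuda" device string is an illustrative assumption.

import returnn.frontend as rf

# Sketch only: set a process-wide default device within a scope;
# the previous default is restored when the context exits.
prev = rf.get_default_device()  # usually None unless set elsewhere
with rf.set_default_device_ctx("cuda"):
    assert rf.get_default_device() == "cuda"
    # Backends consult rf.get_default_device() when materializing new tensors.
assert rf.get_default_device() == prev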

returnn/tensor/_dim_extra.py
@@ -114,6 +114,8 @@ class _DimExtra:
         # We can have different tag variants per batch info (e.g. with beam), or per control flow ctx.
         # They each have same_as = self. The same_base should have the base (global) batch info.
         self.same_for_batch_ctx = {}  # type: Dict[Tuple[BatchInfo,Optional[ControlFlowContext]],_d.Dim]
+        self.cache_dyn_size_ext_dev = {}  # type: Dict[str,_t.Tensor]  # device -> dyn_size_ext
+        self.cache_seq_mask = {}  # type: Dict[str,_t.Tensor]  # device -> seq_mask
 
     def __getstate__(self):
         d = vars(self).copy()
@@ -349,6 +351,9 @@ class _DimMixin:
         """
         if self.dyn_size_ext:
             self.dyn_size_ext.raw_tensor = None
+        if self._extra:
+            self._extra.cache_dyn_size_ext_dev.clear()
+            self._extra.cache_seq_mask.clear()
 
     def _can_use_in_ctx(self, ctx):
         """
@@ -701,10 +706,23 @@ class _DimMixin:
             )
             self.dyn_size_ext.placeholder = dyn_size
 
-    def get_mask(self: Dim, *, dim_order: Optional[Sequence[Dim]] = None) -> _t.Tensor:
+    def _get_dyn_size_ext_for_device(self: Dim, device: Optional[str]) -> _t.Tensor:
+        if not device:
+            return self.dyn_size_ext
+
+        import returnn.frontend as rf
+
+        self._make_extra()
+        if device in self._extra.cache_dyn_size_ext_dev:
+            return self._extra.cache_dyn_size_ext_dev[device]
+        self._extra.cache_dyn_size_ext_dev[device] = rf.copy_to_device(self.dyn_size_ext, device=device)
+        return self._extra.cache_dyn_size_ext_dev[device]
+
+    def get_mask(self: Dim, *, dim_order: Optional[Sequence[Dim]] = None, device: Optional[str] = None) -> _t.Tensor:
         """
         :param dim_order: if given, the dims of the mask will be in this order.
             This can be useful if the mask is broadcasted against some other tensor.
+        :param str|None device: if given, will move the mask to this device
         :return: if need_masking(), the corresponding mask.
             If this is e.g. the time-dim T of shape [B], then the mask will be of shape [B,T].
             The mask could be used with :func:`masked_select` (``boolean_mask``) or ``where``.
@@ -715,9 +733,17 @@ class _DimMixin:
         # noinspection PyProtectedMember
         backend = self.dyn_size_ext._raw_backend
 
+        if not device:
+            device = rf.get_default_device()
+
+        if self._extra and device in self._extra.cache_seq_mask:
+            return self._extra.cache_seq_mask[device]
+
+        size_ext = self._get_dyn_size_ext_for_device(device)
+
         max_idx = rf.reduce(
-            self.dyn_size_ext,
-            axis=self.dyn_size_ext.dims,
+            size_ext,
+            axis=size_ext.dims,
             mode="max",
             # Masking here is not always possible, e.g. if we have
             # tag = Dim{'self-att-keys'['time:var:extern_data:classes'[B]]}
@@ -731,9 +757,11 @@ class _DimMixin:
         # and this likely produces nan in backprop or elsewhere.
         # Thus, mask size_ext itself, and set the padded values to 1.
         # This assumes that max_idx >= 1.
-        size_ext = self.dyn_size_ext.copy_masked(max_idx)
-        idx_range = backend.range_over_dim(self)
+        size_ext = size_ext.copy_masked(max_idx)
+        with rf.set_default_device_ctx(device):
+            idx_range = backend.range_over_dim(self)
         seq_mask = rf.compare(idx_range, "<", size_ext, allow_broadcast_all_sources=True, dim_order=dim_order)
+        self._make_extra().cache_seq_mask[device] = seq_mask
         return seq_mask
 
     def is_batch_dim(self):
@@ -1053,7 +1081,8 @@ class _DimMixin:
         assert x.dimension is not None
         if y is None:
             if not template_only and backend and not tf:
-                y = backend.convert_to_tensor(x.dimension, dims=[], dtype=size_dtype, name=y_name)
+                with rf.set_default_device_ctx(None):
+                    y = backend.convert_to_tensor(x.dimension, dims=[], dtype=size_dtype, name=y_name)
             else:
                 y = _t.Tensor(
                     name=y_name,
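
Note (not part of the diff): the net effect of the _dim_extra.py changes, sketched with a hypothetical dynamic dim `time_dim`. Per-device copies of dyn_size_ext and the resulting sequence masks are now cached on the dim, keyed by device string, and cleared again when the dim's eager content is reset.

# Sketch only; `time_dim` is a hypothetical dynamic dim whose dyn_size_ext lives on CPU.
mask = time_dim.get_mask(device="cuda")        # sizes copied to "cuda" once, mask cached
mask_again = time_dim.get_mask(device="cuda")  # served from the per-device cache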

returnn/tensor/_tensor_extra.py
@@ -2730,7 +2730,6 @@ class _TensorMixin(_TensorMixinBase):
             if BT or TB major, and axis is T or None.
             In general compatible to placeholder, i.e. same ndim, with broadcast dims.
             We assert here that the axis is dynamic (:func:`is_axis_dynamic`), i.e. we have the size.
-        :rtype: tf.Tensor
         """
         if isinstance(axis, Dim):
             axis = self.get_axis_from_description(axis)
@@ -2781,7 +2780,7 @@ class _TensorMixin(_TensorMixinBase):
         assert 0 <= axis < self.batch_ndim
         assert axis != self.batch_dim_axis
         tag: Dim = self.dim_tags[axis]
-        return tag.get_mask(dim_order=self.dims)
+        return tag.get_mask(dim_order=self.dims, device=self.device)
 
     def get_sequence_lengths_broadcast(self, axis=None):
         """

returnn/tensor/tensor_dict.py
@@ -68,6 +68,7 @@ class TensorDict:
                 dim.batch = None
                 if dim.dyn_size_ext:
                     dim.dyn_size_ext.reset()
+                dim.reset_eager()
 
     def copy_template(self) -> TensorDict:
         """copy template"""

returnn/tf/frontend_layers/_backend.py
@@ -431,6 +431,7 @@ class ReturnnLayersBackend(Backend[Layer]):
         dims: Sequence[Dim],
         dtype: str,
         sparse_dim: Optional[Dim] = None,
+        device: Optional[str] = None,
         name: Optional[str] = None,
     ) -> Tensor[Layer]:
         """convert to tensor"""

returnn/tf/frontend_low_level/_backend.py
@@ -370,6 +370,7 @@ class TFBackend(Backend[tf.Tensor]):
         dims: Sequence[Dim],
         dtype: str,
         sparse_dim: Optional[Dim] = None,
+        device: Optional[str] = None,
         name: Optional[str] = None,
     ) -> _TT:
         """
@@ -377,6 +378,7 @@ class TFBackend(Backend[tf.Tensor]):
         :param dims:
         :param dtype:
         :param sparse_dim:
+        :param device:
         :param name:
         :return: tensor
         """

returnn/torch/frontend/_backend.py
@@ -525,7 +525,11 @@ class TorchBackend(Backend[torch.Tensor]):
         :return: parameter
         """
         assert all(d.is_static() for d in tensor.dims)
-        data = torch.zeros([d.dimension for d in tensor.dims], dtype=TorchBackend.as_dtype_raw(tensor.dtype))
+        data = torch.zeros(
+            [d.dimension for d in tensor.dims],
+            dtype=TorchBackend.as_dtype_raw(tensor.dtype),
+            device=rf.get_default_device(),
+        )
         if tensor.dtype.startswith("int"):
             requires_grad = False
         else:
@@ -645,6 +649,7 @@ class TorchBackend(Backend[torch.Tensor]):
         dims: Sequence[Dim],
         dtype: str,
         sparse_dim: Optional[Dim] = None,
+        device: Optional[str] = None,
         name: Optional[str] = None,
     ) -> Tensor[torch.Tensor]:
         """
@@ -652,6 +657,7 @@ class TorchBackend(Backend[torch.Tensor]):
         :param dims:
         :param dtype:
         :param sparse_dim:
+        :param device:
         :param name:
         :return: tensor
         """
@@ -661,7 +667,11 @@ class TorchBackend(Backend[torch.Tensor]):
             name = name or "raw_tensor"
         else:
             name = name or "const"
-            value = torch.tensor(value, dtype=TorchBackend.as_dtype_raw(dtype))
+            value = torch.tensor(
+                value,
+                dtype=TorchBackend.as_dtype_raw(dtype),
+                device=device or rf.get_default_device(),
+            )
         assert isinstance(value, torch.Tensor)
         return Tensor(name, dims=dims, dtype=dtype, sparse_dim=sparse_dim, raw_tensor=value)
 
@@ -682,7 +692,9 @@ class TorchBackend(Backend[torch.Tensor]):
         # onnx::ConstantOfShape (via torch.full) must get shape as int64.
         # https://github.com/rwth-i6/returnn/issues/1333#issuecomment-1607236783
         shape = [dim.long() if isinstance(dim, torch.Tensor) else dim for dim in shape]
-        raw_tensor = torch.full(shape, fill_value, dtype=TorchBackend.as_dtype_raw(dtype))
+        raw_tensor = torch.full(
+            shape, fill_value, dtype=TorchBackend.as_dtype_raw(dtype), device=rf.get_default_device()
+        )
         return Tensor(
             "full", dims=dims, sparse_dim=sparse_dim, feature_dim=feature_dim, dtype=dtype, raw_tensor=raw_tensor
         )
@@ -934,7 +946,9 @@ class TorchBackend(Backend[torch.Tensor]):
             sparse_dim=dim,
             dtype=dtype,
         )
-        out.raw_tensor = torch.arange(dim.get_dim_value(), dtype=TorchBackend.as_dtype_raw(out.dtype))
+        out.raw_tensor = torch.arange(
+            dim.get_dim_value(), dtype=TorchBackend.as_dtype_raw(out.dtype), device=rf.get_default_device()
+        )
         return out
 
     @staticmethod
@@ -1084,7 +1098,7 @@ class TorchBackend(Backend[torch.Tensor]):
         out = Tensor(
             name=f"random_{distribution}", dims=dims, dtype=dtype, sparse_dim=sparse_dim, feature_dim=feature_dim
         )
-        out.raw_tensor = torch.empty(shape, dtype=dtype_)
+        out.raw_tensor = torch.empty(shape, dtype=dtype_, device=rf.get_default_device())
         assert explicit_state is None  # not implemented otherwise
         generator = None  # using the global default from PT
         assert isinstance(static, bool)
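
Note (not part of the diff): a hedged sketch of what the TorchBackend changes mean for newly created raw tensors. It assumes the RF PyTorch backend is active and a CUDA device is available; both are assumptions, not statements from the diff.

import returnn.frontend as rf

# Sketch only: with a default device set, constants, arange/full/zeros and
# random tensors created by TorchBackend are allocated directly on that device.
with rf.set_default_device_ctx("cuda"):
    one = rf.convert_to_tensor(1.0, dtype="float32")
    # Expected under the above assumptions: one.raw_tensor.device.type == "cuda"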

PKG-INFO (PKG-INFO → returnn.egg-info/PKG-INFO)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: returnn
-Version: 1.20230718.124003
+Version: 1.20230718.183712
 Summary: The RWTH extensible training framework for universal recurrent neural networks
 Home-page: https://github.com/rwth-i6/returnn/
 Author: Albert Zeyer

returnn-1.20230718.124003/_setup_info_generated.py (removed)
@@ -1,2 +0,0 @@
-version = '1.20230718.124003'
-long_version = '1.20230718.124003+git.88cdf02'

returnn-1.20230718.124003/returnn/frontend/device.py (removed)
@@ -1,26 +0,0 @@
-"""
-Device handling.
-"""
-
-
-from __future__ import annotations
-from typing import Optional
-from returnn.tensor import Tensor
-
-
-def copy_to_device(x: Tensor, device: Optional[str]) -> Tensor:
-    """
-    Copy tensor to device.
-
-    :param x: tensor
-    :param device:
-    :return: tensor on device
-    """
-    if not device:
-        return x
-    if x.raw_tensor is None:
-        return x
-    if x.device == device:
-        return x
-    # noinspection PyProtectedMember
-    return x._raw_backend.copy_to_device(x, device)