returnn 1.20250120.153919__tar.gz → 1.20250122.134518__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of returnn might be problematic.
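In short: _setup_info_generated.py is regenerated for the new release (the old file removed, the new one added), the vendored returnn/util/better_exchook.py is updated (+47/-29), and virtually all remaining changed lines belong to one mechanical sweep through the frontend, tensor, TF and Torch code that replaces implicit truthiness checks on Dim.dyn_size_ext with explicit "is None" / "is not None" comparisons; the largest share of that sweep is in returnn/tensor/_dim_extra.py (+37/-32).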
- {returnn-1.20250120.153919/returnn.egg-info → returnn-1.20250122.134518}/PKG-INFO +1 -1
- returnn-1.20250122.134518/_setup_info_generated.py +2 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_numpy_backend.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/conv.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/dims.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/_dim_extra.py +37 -32
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/_tensor_extra.py +5 -5
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/dim.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/tensor_dict.py +3 -3
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/utils.py +3 -3
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/_backend.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/layer.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/loop.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/make_layer.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_low_level/_backend.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/base.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/basic.py +4 -4
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/rec.py +3 -3
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/engine.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/frontend/_backend.py +3 -3
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/exception_helper.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/better_exchook.py +47 -29
- {returnn-1.20250120.153919 → returnn-1.20250122.134518/returnn.egg-info}/PKG-INFO +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/rf_utils.py +3 -3
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TFUtil.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_rec.py +1 -1
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/torch_export_to_onnx.py +1 -1
- returnn-1.20250120.153919/_setup_info_generated.py +0 -2
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/.editorconfig +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/.gitignore +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/.gitmodules +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/.kateconfig +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/CHANGELOG.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/CODEOWNERS +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/CONTRIBUTING.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/LICENSE +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/MANIFEST.in +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/README.rst +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/12AX.cluster_map +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/_setup_returnn_env.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-fwd.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-horovod-mpi.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-horovod-mpi.py.sh +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-horovod-mpi.sh +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-hyper-param-tuning.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-iter-dataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-list-devices.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-lua-torch-layer.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-pretrain.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-record-and-push-to-webserver.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-returnn-as-framework.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-rf-pt-benchmark.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-rf.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-rhn-enwik8.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-sprint-interface.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-att-copy.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-attention.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-chunking-blstm.12ax.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-contribrnn-lstm.12ax.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-enc-dec.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-hard-att-copy.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-lstm-benchmark.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-maxgradnorm-lstm.12ax.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-native-lstm-lowmem.12ax.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-native-lstm.12ax.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-native-lstm2.12ax.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-native-lstm2.12ax.tuned.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-neural-transducer.12ax.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-rec-explicit-lstm.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-rec-explicit-rnn.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-rec-self-att.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-search-compiled-graph.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-tf-vanilla-lstm.12ax.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-timit-lstm-ctc.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-torch.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo-upd-mult-model.lstm.12ax.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/demo.sh +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/IAM_lines/a01-000u-00.png +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/IAM_lines/a01-007-04.png +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/IAM_lines/a01-007-06.png +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/README.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/chars.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/config_demo +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/config_fwd +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/config_real +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/create_IAM_dataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/decode.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/features/raw/demo.h5 +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/go.sh +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/lines.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/split/eval.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/split/train.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/IAM/split/valid.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/artificial/create_test_h5.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/artificial/forwardconfig +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/artificial/go.sh +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/artificial/trainconfig +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/artificial_rgb/create_test_h5.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/artificial_rgb/forwardconfig +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/artificial_rgb/go.sh +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/demos/mdlstm/artificial_rgb/trainconfig +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/pyproject.toml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/requirements.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/__main__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/__old_mod_loader__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/__setup__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/config.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/audio.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/basic.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/bundle_file.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/cached.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/cached2.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/distrib_files.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/generating.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/hdf.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/lm.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/map.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/meta.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/multi_proc.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/normalization_data.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/numpy_dump.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/postprocessing.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/raw_wav.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/sprint.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/stereo.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/text_dict.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/util/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/util/feature_extraction.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/util/strings.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/datasets/util/vocabulary.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/engine/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/engine/base.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/engine/batch.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/__main__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/.git +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/.gitignore +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/LICENSE +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/aligner.gif +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/check.png +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/core.cu +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/core.h +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/core_cpu.cpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/LICENSE +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/MANIFEST.in +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/binding.cpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.cu +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/core.h +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/requirements.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/setup.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/pytorch_binding/warp_rna/test.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/ref_rna.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/setup.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op.cc +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/src/warp_rna_op_kernel_tmpl.h +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/tensorflow_binding/warp_rna/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/WarpRna/warp-rna/test.cpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/graph_editor/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/graph_editor/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/graph_editor/edit.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/graph_editor/reroute.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/graph_editor/select.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/graph_editor/subgraph.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/graph_editor/transform.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/extern/graph_editor/util.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/forward_iface.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_backend.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_cache.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_native/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_native/backend.cpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_native/backend.hpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_native/module.cpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_native/module.hpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_native/py_utils.hpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_native/tensor_ops.cpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_native/tensor_ops.hpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_random_journal.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/_utils.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/array_.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/attention.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/audio/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/audio/mel.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/audio/specaugment.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/backend.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/build_from_dict.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/cond.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/const.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/container.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/control_flow_ctx.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/conversions/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/conversions/espnet_e_branchformer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/conversions/hf_llama.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/conversions/torch_nn.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/decoder/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/decoder/transformer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/device.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/dropout.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/dtype.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/encoder/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/encoder/base.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/encoder/conformer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/encoder/conformer_v2.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/encoder/e_branchformer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/encoder/transformer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/gradient.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/graph.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/hooks.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/init.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/label_smoothing.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/linear.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/loop.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/loss.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/math_.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/matmul.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/module.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/normalization.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/parameter.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/parametrizations.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/parametrize.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/piecewise_linear.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/rand.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/rec.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/reduce.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/run_ctx.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/signal.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/state.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/stepwise_scheduler.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/tensor_array.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/frontend/types.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/import_/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/import_/common.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/import_/git.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/import_/import_.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/learning_rate_control.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/log.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/native_op.cpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/native_op.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/pretrain.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/sprint/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/sprint/cache.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/sprint/control.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/sprint/error_signals.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/sprint/extern_interface.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/sprint/interface.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/_tensor_mixin_base.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/_tensor_op_overloads.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/control_flow_ctx.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/marked_dim.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/tensor.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/compat.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/data_pipeline.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/distributed.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/engine.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/_utils.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/cond.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/config_entry_points.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/debug_eager_mode.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/dims.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/masked_computation.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/parameter_assign.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/prev_tensor_ref.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_low_level/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/horovod.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/hyper_param_tuning.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/segmental_model.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/signal_processing.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/variable.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/native_op.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/network.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/sprint.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/updater.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/util/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/util/basic.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/util/data.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/util/gradient_checkpoint.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/util/ken_lm.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/util/open_fst.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/data/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/data/extern_data.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/data/pipeline.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/data/queued_data_iter.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/data/returnn_dataset_wrapper.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/data/tensor_utils.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/distributed.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/frontend/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/frontend/_rand.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/frontend/bridge.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/frontend/raw_ops.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/optim/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/optim/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/optim/lion.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/updater.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/array_.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/debug_inf_nan.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/diagnose_gpu.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/gradient_checkpoint.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/module.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/scaled_gradient.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/__init__.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/basic.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/bpe.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/debug.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/debug_helpers.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/file_cache.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/fsa.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/literal_py_to_pickle.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/lru_cache.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/math.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/multi_proc_non_daemonic_spawn.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/native_code_compiler.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/pprint.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/py-to-pickle.cpp +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/py_ext_mod_compiler.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/result_with_reason.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/sig_proc.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/task_system.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/train_proc_manager.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/util/watch_memory.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn.egg-info/SOURCES.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn.egg-info/dependency_links.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn.egg-info/top_level.txt +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/rnn.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/setup.cfg +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/setup.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/DummySprintExec.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm-inspection-profile.xml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/.gitignore +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/.name +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/codeStyleSettings.xml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/codeStyles/Project.xml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/codeStyles/codeStyleConfig.xml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/inspectionProfiles/Project_Default.xml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/inspectionProfiles/profiles_settings.xml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/misc.xml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/modules.xml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/returnn.iml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/PyCharm.idea/scopes/scope_settings.xml +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/_set_num_threads1.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/_setup_returnn_env.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/_setup_test_env.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/bpe-unicode-demo.codes +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/bpe-unicode-demo.vocab +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/lexicon_opt.fst +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/lexicon_opt.isyms +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/lexicon_opt.jpg +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/lexicon_opt.osyms +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/lint_common.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/pycharm-inspect.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/pylint.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/returnn-as-framework.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/spelling.dic +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_Config.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_Dataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_Fsa.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_GeneratingDataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_HDFDataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_LearningRateControl.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_Log.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_MultiProcDataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_Pretrain.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_ResNet.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_SprintDataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_SprintInterface.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TFEngine.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TFNativeOp.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TFNetworkLayer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TFNetworkRecLayer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TFNetworkSigProcLayer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TFUpdater.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TF_determinism.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TaskSystem.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TaskSystem_SharedMem.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_TranslationDataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_Util.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_demos.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_fork_exec.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_hdf_dump.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_array.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_attention.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_base.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_cond.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_const.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_container.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_conv.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_decoder_transformer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_encoder_conformer.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_gradient.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_label_smoothing.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_loop.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_math.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_normalization.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_piecewise_linear.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_reduce.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_rf_signal.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_tensor.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_threading.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_tools.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_torch_dataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_torch_engine.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_torch_frontend.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_torch_internal_frontend.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/test_torch_util.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tests/torch_utils.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/_setup_returnn_env.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/analyze-dataset-batches.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/bliss-collect-seq-lens.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/bliss-dump-text.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/bliss-get-segment-names.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/bliss-to-ogg-zip.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/bpe-create-lexicon.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/calculate-word-error-rate.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/cleanup-old-models.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/collect-orth-symbols.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/collect-words.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/compile_native_op.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/compile_tf_graph.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/debug-dump-search-scores.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/debug-plot-search-scores.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/dump-dataset-raw-strings.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/dump-dataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/dump-forward-stats.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/dump-forward.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/dump-network-json.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/dump-pickle.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/extract_state_tying_from_dataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/get-attention-weights.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/get-best-model-epoch.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/hdf_dump.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/hdf_dump_translation_dataset.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/import-blocks-mt-model.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/import-t2t-mt-model.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/.gitignore +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/Makefile +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/example/README.md +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/example/libs_list +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/example/network.040/i600_m600_m600.sgd_b16_lr0_cl2.newbobabs.keep_over_epoch.lstm2.config +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/example/rescore_lattice.sh +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/example/state_vars_list +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/example/tensor_names_list +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/file.h +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/htklatticerescorer.cc +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/htklatticerescorer.h +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/main.cc +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/rescorer.h +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/vocabulary.cc +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/lattice_rescorer/vocabulary.h +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/tf_avg_checkpoints.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/tf_inspect_checkpoint.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/tf_inspect_summary_log.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/torch_avg_checkpoints.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/torch_inspect_checkpoint.py +0 -0
- {returnn-1.20250120.153919 → returnn-1.20250122.134518}/tools/torch_inspect_checkpoint_and_opt.py +0 -0
--- returnn-1.20250120.153919/returnn/frontend/_numpy_backend.py
+++ returnn-1.20250122.134518/returnn/frontend/_numpy_backend.py
@@ -166,7 +166,7 @@ class NumpyBackend(Backend[numpy.ndarray]):
         :param device:
         :return: tensor with shape [dim]
         """
-        if not dtype and dim.dyn_size_ext:
+        if not dtype and dim.dyn_size_ext is not None:
             dtype = dim.dyn_size_ext.dtype
         if not dtype:
             dtype = rf.get_default_array_index_dtype()
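The hunk above is the first instance of the pattern that repeats through nearly every hunk below: an implicit truthiness test on Dim.dyn_size_ext (a Tensor, or None when the dynamic size is unknown) becomes an explicit identity test against None. The two are not interchangeable: "x is not None" only asks whether the attribute is set, while "if x:" additionally invokes the object's __bool__, which for a tensor-like type can be ambiguous, value-dependent, or disallowed outright. A minimal, self-contained sketch of the difference, using a hypothetical TensorLike stand-in rather than the real returnn.tensor.Tensor:

    class TensorLike:
        # Hypothetical stand-in: a tensor type that refuses ambiguous truth tests.
        def __bool__(self):
            raise TypeError("truth value of a tensor is ambiguous; compare against None")

    dyn_size_ext = TensorLike()

    try:
        if dyn_size_ext:  # old pattern: calls __bool__, here it raises
            pass
    except TypeError as exc:
        print("old pattern fails:", exc)

    if dyn_size_ext is not None:  # new pattern: pure identity test, __bool__ never runs
        print("new pattern: dyn_size_ext is set")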
--- returnn-1.20250120.153919/returnn/frontend/conv.py
+++ returnn-1.20250122.134518/returnn/frontend/conv.py
@@ -649,7 +649,7 @@ def make_conv_out_spatial_dims(
         assert isinstance(out_spatial_dim, Dim)
         if description_prefix and out_spatial_dim != in_spatial_dim:
             out_spatial_dim.name = f"{description_prefix}:spatial{i}"
-        if in_spatial_dim.dyn_size_ext and not out_spatial_dim.dyn_size_ext:
+        if in_spatial_dim.dyn_size_ext and out_spatial_dim.dyn_size_ext is None:
             out_spatial_dim.dyn_size_ext = _calc_out_dim(
                 in_dim=in_spatial_dim.dyn_size_ext,
                 filter_size=filter_size[i],
--- returnn-1.20250120.153919/returnn/frontend/dims.py
+++ returnn-1.20250122.134518/returnn/frontend/dims.py
@@ -30,7 +30,7 @@ def range_over_dim(dim: Dim, *, dtype: Optional[str] = None, device: Optional[st
     :param device:
     :return: tensor with shape [dim]
     """
-    if dim.dyn_size_ext:
+    if dim.dyn_size_ext is not None:
         backend = get_backend_by_tensor(dim.dyn_size_ext, fallback=global_backend)
     else:
         backend = global_backend
--- returnn-1.20250120.153919/returnn/tensor/_dim_extra.py
+++ returnn-1.20250122.134518/returnn/tensor/_dim_extra.py
@@ -96,7 +96,7 @@ class _DimExtra:
         self.match_priority = match_priority
         if src_data:
             assert isinstance(src_data, _t.Tensor) and isinstance(src_axis, int)
-        if not batch and dim.dyn_size_ext:
+        if not batch and dim.dyn_size_ext is not None:
             batch = dim.dyn_size_ext.batch
             if not control_flow_ctx:
                 control_flow_ctx = dim.dyn_size_ext.control_flow_ctx
@@ -197,7 +197,7 @@ class _DimMixin:
         :return: batch info (deprecated)
         """
         if not self._extra:
-            if self.dyn_size_ext:
+            if self.dyn_size_ext is not None:
                 return self.dyn_size_ext.batch
             return None
         return self._extra.batch
@@ -214,7 +214,7 @@ class _DimMixin:
         :return: control flow context (deprecated)
         """
         if not self._extra:
-            if self.dyn_size_ext:
+            if self.dyn_size_ext is not None:
                 return self.dyn_size_ext.control_flow_ctx
             return None
         return self._extra.control_flow_ctx
@@ -291,7 +291,7 @@ class _DimMixin:
         elif self.dimension is not None:
             desc += f"({self.dimension})"
         else:
-            if self.dyn_size_ext:
+            if self.dyn_size_ext is not None:
                 desc += "[%s]" % ",".join(self.dyn_size_ext.get_batch_axes_short_description(special_axes=False))
             else:
                 desc += "[?]"
@@ -383,7 +383,7 @@ class _DimMixin:
             visited.add(id(dim))
             dim.reset_batch_ctx()
             dim._dyn_size_max_value = None
-            if dim.dyn_size_ext:
+            if dim.dyn_size_ext is not None:
                 dim.dyn_size_ext.reset()
             # noinspection PyProtectedMember
             dim_extra = dim._extra
@@ -446,7 +446,7 @@ class _DimMixin:
         # E.g. ctx == loop(time_dim), when self.control_flow_ctx == None,
         # we can use self in ctx, iff time_dim not in self.dyn_size_ext.dim_tags.
         # We can only do this check if we know about dyn_size_ext.
-        if not self.dyn_size_ext:
+        if self.dyn_size_ext is None:
             return False
         parent_dims = ControlFlowContext.collect_parent_dims(ctx)
         for dim in self.dyn_size_ext.dim_tags:
@@ -488,12 +488,12 @@ class _DimMixin:
         if key in extra.same_for_batch_ctx:
             same = extra.same_for_batch_ctx[key]
             if same is not self:
-                if same.dyn_size_ext and not self.dyn_size_ext:
+                if same.dyn_size_ext and self.dyn_size_ext is None:
                     self.dyn_size_ext = same.dyn_size_ext
                 if same.dyn_size_ext and same.dyn_size_ext.placeholder is not None:
                     if self.dyn_size_ext.placeholder is None:
                         self.dyn_size_ext = same.dyn_size_ext
-                if self.dyn_size_ext and not same.dyn_size_ext:
+                if self.dyn_size_ext and same.dyn_size_ext is None:
                     same.dyn_size_ext = self.dyn_size_ext
                 if self.dyn_size_ext and self.dyn_size_ext.placeholder is not None:
                     if not same.dyn_size_ext or same.dyn_size_ext.placeholder is None:
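Note the asymmetry in the two rewritten conditions above: only the operand whose absence is being tested moves to an explicit "is None" (self.dyn_size_ext is None, same.dyn_size_ext is None), while the presence test on the other operand (same.dyn_size_ext and ..., self.dyn_size_ext and ...) keeps its truthy form, as do the unchanged context lines. The sweep looks like a targeted cleanup rather than a blanket find-and-replace.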
@@ -521,10 +521,10 @@ class _DimMixin:
         from returnn.tensor import ControlFlowContext

         assert self.can_be_used_as_dim()
-        if self.batch == batch and self._can_use_in_ctx(ctx) and self.dyn_size_ext:
+        if self.batch == batch and self._can_use_in_ctx(ctx) and self.dyn_size_ext is not None:
             self._validate_in_current_graph()
             self._maybe_update()
-            if self.batch == batch and self._can_use_in_ctx(ctx) and self.dyn_size_ext:  # check again
+            if self.batch == batch and self._can_use_in_ctx(ctx) and self.dyn_size_ext is not None:  # check again
                 return self
         if self.is_batch_dim():
             # We ignore the ctx for the batch dim currently.
@@ -550,11 +550,11 @@ class _DimMixin:
                     assert (
                         tag.batch == batch
                     )  # some code updated batch directly (incorrectly) and could trigger this
-                    if tag.dyn_size_ext:
+                    if tag.dyn_size_ext is not None:
                         return tag
                     dim_tag = tag
                     break
-            if same_base.batch == batch and same_base._can_use_in_ctx(ctx) and same_base.dyn_size_ext:
+            if same_base.batch == batch and same_base._can_use_in_ctx(ctx) and same_base.dyn_size_ext is not None:
                 return same_base
         else:
             same_base = self
@@ -585,7 +585,7 @@ class _DimMixin:
         batch_base = batch.get_global_base()
         base_can_use_in_ctx = None  # type: Optional[_d.Dim]
         # noinspection PyProtectedMember
-        if same_base.batch == batch_base and same_base._can_use_in_ctx(ctx) and same_base.dyn_size_ext:
+        if same_base.batch == batch_base and same_base._can_use_in_ctx(ctx) and same_base.dyn_size_ext is not None:
             base_can_use_in_ctx = same_base
         elif same_base._extra:
             from returnn.tf.util.data import ControlFlowContext
@@ -593,10 +593,15 @@ class _DimMixin:
             for ctx_ in ControlFlowContext.abs_ctx_stack_with_root(ctx):
                 # noinspection PyProtectedMember
                 tag = same_base._extra.same_for_batch_ctx.get((batch_base, ctx_), None)
-                if tag and tag._can_use_in_ctx(ctx) and tag._validate_in_current_graph() and tag.dyn_size_ext:
+                if (
+                    tag
+                    and tag._can_use_in_ctx(ctx)
+                    and tag._validate_in_current_graph()
+                    and tag.dyn_size_ext is not None
+                ):
                     base_can_use_in_ctx = tag
                     break
-        if base_can_use_in_ctx and base_can_use_in_ctx.dyn_size_ext:
+        if base_can_use_in_ctx and base_can_use_in_ctx.dyn_size_ext is not None:
             if base_can_use_in_ctx.dyn_size_ext.have_batch_axis():
                 # The same_base has some dyn size without any beam nor control flow context.
                 # We can expand it to the current beam, or extend by padded batch.
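This is the only hunk that grows (-593,10 +593,15): with "is not None" appended, the single-line conjunction would exceed the usual line length, so it is also split into a parenthesized multi-line condition.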
@@ -654,13 +659,13 @@ class _DimMixin:
                     # In any case, reuse it then.
                     candidate.batch = batch
                     if dyn_size_ext:
-                        if candidate.dyn_size_ext:
+                        if candidate.dyn_size_ext is not None:
                             candidate.dyn_size_ext.batch = batch
                             assert candidate.dyn_size_ext.dim_tags == dyn_size_ext.dim_tags
                         else:
                             candidate.dyn_size_ext = dyn_size_ext
                             assert not candidate.dyn_size_ext.control_flow_ctx
-                    elif candidate.dyn_size_ext:
+                    elif candidate.dyn_size_ext is not None:
                         candidate.dyn_size_ext.batch = batch
                     else:
                         candidate.complete_dyn_size(template_only=True)
@@ -684,11 +689,11 @@ class _DimMixin:
             dim_tag.set_tag_on_size_tensor(dyn_size_ext.placeholder, batch=batch)
         same_base_extra.same_for_batch_ctx[(batch, ctx)] = dim_tag
         if dyn_size_ext:
-            if not dim_tag.dyn_size_ext:
+            if dim_tag.dyn_size_ext is None:
                 dim_tag.dyn_size_ext = dyn_size_ext
             else:
                 assert dim_tag.dyn_size_ext.dims == dyn_size_ext.dims
-        elif dim_tag.dyn_size_ext:
+        elif dim_tag.dyn_size_ext is not None:
             pass
         else:
             dim_tag.complete_dyn_size(template_only=True)
@@ -719,7 +724,7 @@ class _DimMixin:
         assert self.can_be_used_as_dim()
         same = self.get_for_batch_ctx(batch, ctx)
         assert dyn_size_ext.batch == batch and dyn_size_ext.control_flow_ctx == ctx
-        if same.dyn_size_ext:
+        if same.dyn_size_ext is not None:
             assert same.dyn_size_ext.dim_tags == dyn_size_ext.dim_tags
             if dyn_size_ext.placeholder is not None:
                 same.dyn_size_ext.placeholder = dyn_size_ext.placeholder
@@ -755,7 +760,7 @@ class _DimMixin:
         If the dyn size can potentially be of a different shape, directly access dyn_size_ext.
         :rtype: tf.Tensor|None
         """
-        if self.dyn_size_ext:
+        if self.dyn_size_ext is not None:
             return self.dyn_size_ext.placeholder
         return None

@@ -788,7 +793,7 @@ class _DimMixin:
         """
         :param tf.Tensor dyn_size:
         """
-        if self.dyn_size_ext:
+        if self.dyn_size_ext is not None:
             if self.dyn_size_ext.placeholder is not None:
                 # Do not allow resetting it to sth different.
                 assert self.dyn_size_ext.placeholder is dyn_size
@@ -911,12 +916,12 @@ class _DimMixin:
             return True
         if not self.dyn_size_ext and self.dimension is not None:
             return True
-        if self.dyn_size_ext:
+        if self.dyn_size_ext is not None:
             return True
         extra = self._get_same_base_extra()
         if extra:
             for _, other in extra.same_for_batch_ctx.items():
-                if other.dyn_size_ext:
+                if other.dyn_size_ext is not None:
                     return True
         return False

@@ -939,7 +944,7 @@ class _DimMixin:
         for dim in candidates:
             # By intention, ignore the batch, only check the ctx.
             # Keep logic in sync with get_for_batch_ctx.
-            if ControlFlowContext.is_parent_or_same(dim.control_flow_ctx, ctx) and dim.dyn_size_ext:
+            if ControlFlowContext.is_parent_or_same(dim.control_flow_ctx, ctx) and dim.dyn_size_ext is not None:
                 return True
         return False

@@ -973,7 +978,7 @@ class _DimMixin:
     return False
 if self.capacity is not None:
     return True
-if not self.dyn_size_ext:  # unknown, so we can only guess
+if self.dyn_size_ext is None:  # unknown, so we can only guess
     if self.is_batch_dim():
         return False
     return True
@@ -1130,7 +1135,7 @@ class _DimMixin:
 for x_dim in op.inputs:
     if self.batch:
         x_dim = x_dim.get_for_batch_ctx(self.batch, self.control_flow_ctx)
-    if x_dim.dyn_size_ext:
+    if x_dim.dyn_size_ext is not None:
         size_dtype = x_dim.dyn_size_ext.dtype
         break
 if not size_dtype:
@@ -1275,7 +1280,7 @@ class _DimMixin:
     if not template_only and y.raw_tensor is not None:
         y_max_value = _bin_op(y_max_value, x_dim.get_dim_value_tensor())
 assert y, f"op {op}?"
-if self.dyn_size_ext:
+if self.dyn_size_ext is not None:
     assert self.dyn_size_ext.dim_tags == y.dim_tags
 if y.batch:
     if self.batch:
@@ -1789,7 +1794,7 @@ class _DimMixin:
 self_dim = self._make_extra().same_for_batch_ctx.get(key, None)
 if self_dim and (self_dim.dyn_size_ext or not dim.dyn_size_ext):
     continue  # keep ours
-if not dim.dyn_size_ext:
+if dim.dyn_size_ext is None:
     continue  # undefined, do not overtake
 self._extra.same_for_batch_ctx[key] = dim
 # noinspection PyProtectedMember
@@ -1815,8 +1820,8 @@ class _DimMixin:
 key = base.batch, base.control_flow_ctx
 assert key not in self_base_extra.same_for_batch_ctx
 self_base_extra.same_for_batch_ctx[key] = self
-if not self.dyn_size_ext:
-    if base.dyn_size_ext:
+if self.dyn_size_ext is None:
+    if base.dyn_size_ext is not None:
         if base.batch and base.batch == self.batch and base.control_flow_ctx == self.control_flow_ctx:
             self.dyn_size_ext = base.dyn_size_ext.copy_template(name="%s:size" % self_base.description)
         elif base.is_batch_dim():
@@ -1914,7 +1919,7 @@ class _DimMixin:
 :return: size tensor, or dyn_size_ext if defined
 :rtype: _t.Tensor
 """
-if self.dyn_size_ext:
+if self.dyn_size_ext is not None:
     if not device or device == "cpu":
         return self.dyn_size_ext
     return self.get_dyn_size_ext_for_device(device)
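Note: the one change repeated throughout these hunks swaps truthiness checks on dyn_size_ext (a Tensor or None) for explicit None comparisons. The diff itself does not state the motivation, but a plausible reading is that `if x:` on a tensor-like object is fragile: if the class defines (or later gains) `__bool__` or `__len__`, a defined but scalar or empty size tensor can evaluate falsy and silently take the wrong branch. A minimal standalone sketch of the pitfall, using a hypothetical stand-in class rather than RETURNN's actual Tensor:

    class FakeSizeTensor:
        """Hypothetical stand-in; NOT RETURNN's Tensor class."""

        def __init__(self, dims):
            self.dims = dims

        def __bool__(self):
            # Truthiness tied to having dims: a defined scalar tensor is falsy.
            return len(self.dims) > 0

    dyn_size_ext = FakeSizeTensor(dims=())  # defined, but scalar (no dims)
    assert not dyn_size_ext                 # truthiness check says "falsy"
    assert dyn_size_ext is not None         # explicit check says "defined"

The explicit `is None` / `is not None` form keeps "undefined" and "defined but scalar" distinct.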
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/_tensor_extra.py
RENAMED
@@ -300,7 +300,7 @@ class _TensorMixin(_TensorMixinBase):
 # Note: tag.kind (feature or spatial) is independent from self.feature_dim_axis.
 if tag.batch and self.batch:
     assert tag.batch == self.batch or self.batch.is_broadcast()
-if tag.dyn_size_ext:
+if tag.dyn_size_ext is not None:
     assert tag.dyn_size_ext.dtype in {"int32", "int64"}
     if tag.dyn_size_ext.have_batch_axis():
         assert tag.batch == tag.dyn_size_ext.batch
@@ -803,7 +803,7 @@ class _TensorMixin(_TensorMixinBase):
 if batch:
     batch_dim_ = batch.dim
 elif dim_tag:
-    if dim_tag.dyn_size_ext:
+    if dim_tag.dyn_size_ext is not None:
         assert dim_tag.dyn_size_ext.dims == ()
         assert dim_tag.dyn_size_ext.raw_tensor is not None
         batch_dim_ = dim_tag.dyn_size_ext.raw_tensor
@@ -1788,7 +1788,7 @@ class _TensorMixin(_TensorMixinBase):
 if self.sparse_dim and self.sparse_dim not in self_dim_tags:
     dims.add(_m.ImplicitSparseDim(self.sparse_dim))
 for dim in self.dim_tags:
-    if dim.dyn_size_ext:
+    if dim.dyn_size_ext is not None:
         for dim_ in dim.dyn_size_ext.dim_tags:
             if dim_ not in self_dim_tags:
                 dims.add(_m.ImplicitDynSizeDim(dim_))
@@ -2791,7 +2791,7 @@ class _TensorMixin(_TensorMixinBase):
 assert self.time_dim_axis is not None
 dim = self._dims[self.time_dim_axis]
 assert isinstance(dim, Dim)
-if dim.dyn_size_ext:
+if dim.dyn_size_ext is not None:
     if dim.dyn_size_ext.raw_tensor is None:
         dim.complete_dyn_size()
     assert dim.dyn_size_ext.raw_tensor is not None
@@ -3699,7 +3699,7 @@ def _create_size_placeholder(name, axis_wo_b, tag, batch_dim):

 with reuse_name_scope("extern_data/placeholders/%s" % name, absolute=True):
     dyn_size_name = "%s_dim%i_size" % (name, axis_wo_b)
-    if not tag.dyn_size_ext:
+    if tag.dyn_size_ext is None:
         dyn_size_ext = _t.Tensor(
             name=dyn_size_name,
             dtype=_t.Tensor.size_dtype,
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/dim.py
RENAMED
@@ -83,7 +83,7 @@ class Dim(_DimMixin):
     self.dyn_size_ext = dimension.copy()
 else:
     raise TypeError(f"unexpected dimension type: {type(dimension)}")
-if not name and not description and self.dyn_size_ext:
+if not name and not description and self.dyn_size_ext is not None:
     name = self.dyn_size_ext.name
 self.name = name or description
 self._dyn_size_max_value = None
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/tensor_dict.py
RENAMED
@@ -107,7 +107,7 @@ class TensorDict:
         dim_value, expected_value_type
     ), f"key {key_} {dim}: unexpected {type(dim_value)}, expected {expected_value_type}"
     out[key_] = dim_value
-elif dim.dyn_size_ext:
+elif dim.dyn_size_ext is not None:
     if include_scalar_dyn_sizes or dim.dyn_size_ext.dims:
         assert isinstance(dim.dyn_size_ext.raw_tensor, expected_value_type), (
             f"key {key_} {dim} {dim.dyn_size_ext}:"
@@ -147,9 +147,9 @@ class TensorDict:
     continue
 key_ = f"{key}:size{i}"
 dim.reset_raw(only_self=True)
-if dim.is_batch_dim() and not dim.dyn_size_ext:
+if dim.is_batch_dim() and dim.dyn_size_ext is None:
     dim.dyn_size_ext = Tensor("batch", [], dtype="int32")
-if dim.dyn_size_ext:
+if dim.dyn_size_ext is not None:
     if not with_scalar_dyn_sizes and not dim.dyn_size_ext.dims:
         pass
     else:
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tensor/utils.py
RENAMED
@@ -51,11 +51,11 @@ def tensor_fill_random_numpy_(
 filled_this_round = False

 for dim in x.dims:
-    if dim.is_batch_dim() and not dim.dyn_size_ext:
+    if dim.is_batch_dim() and dim.dyn_size_ext is None:
         dim.dyn_size_ext = Tensor("batch", [], dtype="int32")
-    if dim.is_dynamic() and not dim.dyn_size_ext:
+    if dim.is_dynamic() and dim.dyn_size_ext is None:
         dim.dyn_size_ext = Tensor(dim.name or "time", dims=[batch_dim], dtype="int32")
-    if not dim.dyn_size_ext:
+    if dim.dyn_size_ext is None:
         continue
     if tensor_fill_random_numpy_(
         dim.dyn_size_ext,
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/_backend.py
RENAMED
@@ -746,7 +746,7 @@ class ReturnnLayersBackend(Backend[Layer]):
 @staticmethod
 def range_over_dim(dim: Dim, *, dtype: Optional[str] = None, device: Optional[str] = None) -> Tensor:
     """range over dim"""
-    if not dtype and dim.dyn_size_ext:
+    if not dtype and dim.dyn_size_ext is not None:
         dtype = dim.dyn_size_ext.dtype
     if not dtype:
         dtype = rf.get_default_array_index_dtype()
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/layer.py
RENAMED
@@ -1098,7 +1098,7 @@ class _NetDictBuilderCtx:
     continue
 # We need dyn_size_ext to know the implicit dims, to correctly set out_shape.
 # If dyn_size_ext is not set yet, try to complete it.
-if not dim.dyn_size_ext:
+if dim.dyn_size_ext is None:
     dim.complete_dyn_size()
 assert (
     dim.dyn_size_ext
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/loop.py
RENAMED
@@ -247,7 +247,7 @@ class Loop:
 """
 assert not self.end_ref, f"{self}.end() can only be called once"
 assert source.dtype == "bool", f"{self}: end expects boolean condition, got {source}"
-if not self.axis.dyn_size_ext:
+if self.axis.dyn_size_ext is None:
     dyn_size_ext = source.copy_template()
     dyn_size_ext.dtype = "int32"
     if dyn_size_ext.control_flow_ctx:
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_layers/make_layer.py
RENAMED
@@ -305,7 +305,7 @@ def register_extern_data(data: Tensor[rfl.Layer]):
 root_scope = rfl.Layer.top().root  # must exist
 _get_raw_layer_by_name(f"data:{data.name}", scope=root_scope, data=data)
 for i, (tag, orig_tag) in enumerate(zip(data.dim_tags, orig_dim_tags)):
-    if not tag.is_batch_dim() and tag.is_dynamic() and not tag.dyn_size_ext:
+    if not tag.is_batch_dim() and tag.is_dynamic() and tag.dyn_size_ext is None:
         # Undefined dynamic dim tag. Set default data template.
         orig_tag.dyn_size_ext = tag.dyn_size_ext = Tensor(
             name=f"{tag.name or (data.name + f'[{i}]')}_default_dyn_size_ext",
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/frontend_low_level/_backend.py
RENAMED
@@ -438,7 +438,7 @@ class TFBackend(Backend[tf.Tensor]):
 :param device:
 :return: range over dim
 """
-if not dtype and dim.dyn_size_ext:
+if not dtype and dim.dyn_size_ext is not None:
     dtype = dim.dyn_size_ext.dtype
 if not dtype:
     dtype = rf.get_default_array_index_dtype()
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/base.py
RENAMED
@@ -829,7 +829,7 @@ class LayerBase:
 assert not out_shape, "out_shape %r must be empty if not a set" % (out_shape,)
 out_shape = set(out_shape)
 out_shape.add(OptionalDim(over_rec_time_dim))
-if over_rec_time_dim.dyn_size_ext:
+if over_rec_time_dim.dyn_size_ext is not None:
     for tag in over_rec_time_dim.dyn_size_ext.dim_tags:
         if tag not in [d.tag if isinstance(d, _MarkedDim) else d for d in out_shape]:
             out_shape.add(OptionalDim(tag))
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/basic.py
RENAMED
@@ -2401,7 +2401,7 @@ class LengthLayer(LayerBase):
     sparse=sparse,
     dim=None if sparse else NotSpecified,
 )
-if not dim.dyn_size_ext:  # yet undefined
+if dim.dyn_size_ext is None:  # yet undefined
     return Data(
         name="%s_length" % name,
         shape=(),
@@ -4503,7 +4503,7 @@ class MergeDimsLayer(_ConcatInputLayer):
     return  # should be handled already
 if target_tag.dimension is not None:  # static
     return  # should be handled already
-if target_tag.dyn_size_ext:
+if target_tag.dyn_size_ext is not None:
     return  # handled already

 out_size = None
@@ -5690,7 +5690,7 @@ class RepeatLayer(_ConcatInputLayer):
 out_dim_.declare_same_as(out_dim)
 if data.batch:
     out_dim_ = out_dim_.get_for_batch_ctx(data.batch, data.control_flow_ctx)
-if tag.dyn_size_ext and not out_dim_.dyn_size_ext:
+if tag.dyn_size_ext and out_dim_.dyn_size_ext is None:
     out_dim_.dyn_size_ext = tag.dyn_size_ext.copy_template()
 return data.copy_template_replace_dim_tag(axis=data.get_batch_axis(0), new_dim_tag=out_dim_)

@@ -6009,7 +6009,7 @@ class ReinterpretDataLayer(_ConcatInputLayer):
 new_dyn_size_ext.placeholder = tf.identity(
     new_dyn_size_ext.placeholder, name=get_valid_scope_name_from_str(new_dyn_size_ext.name)
 )
-if new_tag.dyn_size_ext:
+if new_tag.dyn_size_ext is not None:
     assert new_dyn_size_ext.dim_tags == new_tag.dyn_size_ext.dim_tags
 new_tag.dyn_size_ext = new_dyn_size_ext
 new_tag.set_tag_on_size_tensor(new_dyn_size_ext.placeholder)
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/tf/layers/rec.py
RENAMED
@@ -1891,9 +1891,9 @@ class _SubnetworkRecCell:
 assert old_dim.is_dynamic_seq_length() and new_dim.is_dynamic_seq_length()
 if new_dim.dyn_size_ext and new_dim.dyn_size_ext.raw_tensor is not None:
     continue
-if not old_dim.dyn_size_ext:
+if old_dim.dyn_size_ext is None:
     continue
-if not new_dim.dyn_size_ext:
+if new_dim.dyn_size_ext is None:
     out_dims[new_axis] = old_dim
     continue
 if old_dim.dyn_size_ext.raw_tensor is not None:
@@ -2488,7 +2488,7 @@ class _SubnetworkRecCell:
 with tf.name_scope(layer.tf_scope_name):
     out = layer.output.copy_as_batch_major().copy_with_time_dim_axis(1)  # [B,T,...]
     time_dim = out.dim_tags[1]
-    if time_dim.dyn_size_ext:
+    if time_dim.dyn_size_ext is not None:
         indices = time_dim.dyn_size_ext.copy()
     else:
         indices = Data.from_tensor(tf_util.get_shape_dim(out.placeholder, 0))
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/engine.py
RENAMED
@@ -1245,7 +1245,7 @@ class Engine(EngineBase):
 In the callback, we pass each sequence without the batch dim,
 so we must adapt the dim tags.
 """
-if not dim.dyn_size_ext:
+if dim.dyn_size_ext is None:
     return dim
 if batch_dim not in dim.dyn_size_ext.dims:
     return dim
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/frontend/_backend.py
RENAMED
@@ -998,7 +998,7 @@ class TorchBackend(Backend[torch.Tensor]):
 else:
     raise TypeError(f"Unsupported type for indices: {type(indices)}")
 if clip_to_valid:
-    if axis.dyn_size_ext:
+    if axis.dyn_size_ext is not None:
         indices = rf.clip_by_value(
             indices, 0, axis.get_dyn_size_ext_for_device(indices.device) - 1, allow_broadcast_all_sources=True
         )
@@ -1434,7 +1434,7 @@ class TorchBackend(Backend[torch.Tensor]):
 :param device:
 :return: tensor with shape [dim]
 """
-if not dtype and dim.dyn_size_ext:
+if not dtype and dim.dyn_size_ext is not None:
     dtype = dim.dyn_size_ext.dtype
 if not dtype:
     dtype = rf.get_default_array_index_dtype()
@@ -1736,7 +1736,7 @@ class TorchBackend(Backend[torch.Tensor]):
 out_raw = masked_select(in_raw, mask_raw, mask_len=known_mask_len)
 if not out_dim:
     out_dim = Dim(None, name="masked_select")
-if not out_dim.dyn_size_ext:
+if out_dim.dyn_size_ext is None:
     out_dim.dyn_size_ext = Tensor("masked_select_size", dims=(), dtype="int64")
 if out_dim.dyn_size_ext.raw_tensor is None:
     out_dim.dyn_size_ext.raw_tensor = torch.tensor(out_raw.shape[0], dtype=torch.int64)
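Note: in the `masked_select` hunk the explicit None guard pairs with a second, separate check on `raw_tensor`: first the size template is created if missing, then the actual value is filled from the raw output length. A runnable sketch of that two-step lazy initialization, using hypothetical stand-ins for RETURNN's Dim/Tensor:

    import torch

    class StubSizeTensor:
        """Hypothetical stand-in for RETURNN's Tensor."""
        def __init__(self, name, dims, dtype):
            self.name, self.dims, self.dtype = name, dims, dtype
            self.raw_tensor = None

    class StubOutDim:
        """Hypothetical stand-in for RETURNN's Dim."""
        def __init__(self):
            self.dyn_size_ext = None

    out_dim = StubOutDim()
    out_raw = torch.zeros(7)  # stand-in for the masked-select output

    if out_dim.dyn_size_ext is None:
        # Step 1: no size tensor yet -> create a scalar int64 template.
        out_dim.dyn_size_ext = StubSizeTensor("masked_select_size", dims=(), dtype="int64")
    if out_dim.dyn_size_ext.raw_tensor is None:
        # Step 2: template exists but holds no value -> fill it from the
        # number of selected elements in the raw output.
        out_dim.dyn_size_ext.raw_tensor = torch.tensor(out_raw.shape[0], dtype=torch.int64)

Note that the freshly created template has empty dims, exactly the kind of scalar tensor where a truthiness check could disagree with an explicit None check; presumably that is why the explicit form is used here.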
{returnn-1.20250120.153919 → returnn-1.20250122.134518}/returnn/torch/util/exception_helper.py
RENAMED
@@ -45,7 +45,7 @@ def help_on_torch_exception(
 if dim in covered_dim_tags:
     continue
 covered_dim_tags.add(dim)
-if not dim.dyn_size_ext:
+if dim.dyn_size_ext is None:
     continue
 info, _ = _help_data_or_array(dim.dyn_size_ext.raw_tensor)
 exc_ext.append(f" dim {dim.short_repr()} size: {info}")