xformers-0.0.34.dev1102.tar.gz → xformers-0.0.34.dev1116.tar.gz
This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- xformers-0.0.34.dev1116/MANIFEST.in +17 -0
- xformers-0.0.34.dev1116/PKG-INFO +31 -0
- xformers-0.0.34.dev1116/README.md +135 -0
- xformers-0.0.34.dev1116/pyproject.toml +11 -0
- xformers-0.0.34.dev1116/requirements.txt +4 -0
- xformers-0.0.34.dev1116/setup.py +651 -0
- xformers-0.0.34.dev1116/tests/test_attention_patterns.py +236 -0
- xformers-0.0.34.dev1116/tests/test_sparsity24.py +1177 -0
- xformers-0.0.34.dev1116/version.txt +1 -0
- xformers-0.0.34.dev1116/xformers/csrc/attention/attention.cpp +37 -0
- xformers-0.0.34.dev1116/xformers/csrc/nvcc_info.cu +15 -0
- xformers-0.0.34.dev1116/xformers/csrc/pt_stable_utils.cpp +36 -0
- xformers-0.0.34.dev1116/xformers/csrc/pt_stable_utils.h +234 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/gemm.cu +369 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/meta_utils.cu +206 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/sparse24.cpp +20 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/sparse24_apply.cu +153 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/sparse24_apply_dense_output.cu +228 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/sparse24_gemm_sm90.cu +462 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/sparse24_largest_mask_2d.cu +242 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/sparse24_metadata.h +289 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/sparse24_pack.cu +158 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/sparse24_pack_test.cu +64 -0
- xformers-0.0.34.dev1116/xformers/csrc/sparse24/sparseNM_dense.cu +122 -0
- xformers-0.0.34.dev1116/xformers/ops/fmha/_triton/splitk_kernels.py +1273 -0
- xformers-0.0.34.dev1116/xformers/ops/fmha/flash.py +810 -0
- xformers-0.0.34.dev1116/xformers/ops/fmha/flash3.py +908 -0
- xformers-0.0.34.dev1116/xformers.egg-info/PKG-INFO +31 -0
- xformers-0.0.34.dev1116/xformers.egg-info/SOURCES.txt +8928 -0
- xformers-0.0.34.dev1116/xformers.egg-info/requires.txt +2 -0
- xformers-0.0.34.dev1102/MANIFEST.in +0 -19
- xformers-0.0.34.dev1102/PKG-INFO +0 -31
- xformers-0.0.34.dev1102/README.md +0 -135
- xformers-0.0.34.dev1102/pyproject.toml +0 -11
- xformers-0.0.34.dev1102/requirements.txt +0 -4
- xformers-0.0.34.dev1102/setup.py +0 -838
- xformers-0.0.34.dev1102/tests/test_attention_mask.py +0 -61
- xformers-0.0.34.dev1102/tests/test_attention_patterns.py +0 -475
- xformers-0.0.34.dev1102/tests/test_attention_utils.py +0 -40
- xformers-0.0.34.dev1102/tests/test_core_attention.py +0 -121
- xformers-0.0.34.dev1102/tests/test_residual.py +0 -34
- xformers-0.0.34.dev1102/tests/test_sparsity24.py +0 -1199
- xformers-0.0.34.dev1102/third_party/flash-attention/version.txt +0 -1
- xformers-0.0.34.dev1102/version.txt +0 -1
- xformers-0.0.34.dev1102/xformers/_flash_attn/__init__.py +0 -11
- xformers-0.0.34.dev1102/xformers/_flash_attn/bert_padding.py +0 -218
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/__init__.py +0 -21
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/ampere_helpers.py +0 -103
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/barrier.py +0 -71
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/benchmark.py +0 -268
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/benchmark_mask_mod.py +0 -688
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/blackwell_helpers.py +0 -753
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/block_info.py +0 -89
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/block_sparsity.py +0 -592
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/copy_utils.py +0 -340
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/cute_dsl_utils.py +0 -124
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/fast_math.py +0 -97
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/flash_bwd.py +0 -1260
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/flash_bwd_postprocess.py +0 -728
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/flash_bwd_preprocess.py +0 -362
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/flash_bwd_sm100.py +0 -2363
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/flash_bwd_sm90.py +0 -1242
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/flash_fwd.py +0 -2677
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/flash_fwd_combine.py +0 -644
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/flash_fwd_sm100.py +0 -2296
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/hopper_helpers.py +0 -102
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/interface.py +0 -1386
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/mask.py +0 -407
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/mask_definitions.py +0 -285
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/mma_sm100_desc.py +0 -291
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/named_barrier.py +0 -31
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/pack_gqa.py +0 -166
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/pipeline.py +0 -365
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/seqlen_info.py +0 -82
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/softmax.py +0 -440
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/testing.py +0 -404
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/tile_scheduler.py +0 -615
- xformers-0.0.34.dev1102/xformers/_flash_attn/cute/utils.py +0 -783
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_interface.py +0 -1616
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton.py +0 -1160
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/__init__.py +0 -0
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/bench.py +0 -1223
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/bwd_prefill.py +0 -814
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/bwd_prefill_fused.py +0 -3266
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/bwd_prefill_onekernel.py +0 -1091
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/bwd_prefill_split.py +0 -1354
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/bwd_ref.py +0 -478
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/fp8.py +0 -716
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/fwd_decode.py +0 -814
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/fwd_prefill.py +0 -649
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/fwd_ref.py +0 -387
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/interface_fa.py +0 -792
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/test.py +0 -932
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/train.py +0 -403
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_amd/utils.py +0 -775
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_attn_triton_og.py +0 -365
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_blocksparse_attention.py +0 -197
- xformers-0.0.34.dev1102/xformers/_flash_attn/flash_blocksparse_attn_interface.py +0 -200
- xformers-0.0.34.dev1102/xformers/_flash_attn/layers/__init__.py +0 -0
- xformers-0.0.34.dev1102/xformers/_flash_attn/layers/patch_embed.py +0 -67
- xformers-0.0.34.dev1102/xformers/_flash_attn/layers/rotary.py +0 -482
- xformers-0.0.34.dev1102/xformers/_flash_attn/losses/__init__.py +0 -0
- xformers-0.0.34.dev1102/xformers/_flash_attn/losses/cross_entropy.py +0 -85
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/__init__.py +0 -0
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/baichuan.py +0 -151
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/bert.py +0 -764
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/bigcode.py +0 -233
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/btlm.py +0 -102
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/falcon.py +0 -143
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/gpt.py +0 -1080
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/gpt_neox.py +0 -124
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/gptj.py +0 -109
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/llama.py +0 -422
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/opt.py +0 -116
- xformers-0.0.34.dev1102/xformers/_flash_attn/models/vit.py +0 -373
- xformers-0.0.34.dev1102/xformers/_flash_attn/modules/__init__.py +0 -0
- xformers-0.0.34.dev1102/xformers/_flash_attn/modules/block.py +0 -397
- xformers-0.0.34.dev1102/xformers/_flash_attn/modules/embedding.py +0 -216
- xformers-0.0.34.dev1102/xformers/_flash_attn/modules/mha.py +0 -993
- xformers-0.0.34.dev1102/xformers/_flash_attn/modules/mlp.py +0 -191
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/__init__.py +0 -0
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/activations.py +0 -135
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/fused_dense.py +0 -688
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/layer_norm.py +0 -800
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/rms_norm.py +0 -174
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/triton/__init__.py +0 -1
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/triton/cross_entropy.py +0 -330
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/triton/k_activations.py +0 -162
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/triton/layer_norm.py +0 -1252
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/triton/linear.py +0 -594
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/triton/mlp.py +0 -149
- xformers-0.0.34.dev1102/xformers/_flash_attn/ops/triton/rotary.py +0 -185
- xformers-0.0.34.dev1102/xformers/_flash_attn/utils/__init__.py +0 -0
- xformers-0.0.34.dev1102/xformers/_flash_attn/utils/benchmark.py +0 -268
- xformers-0.0.34.dev1102/xformers/_flash_attn/utils/distributed.py +0 -144
- xformers-0.0.34.dev1102/xformers/_flash_attn/utils/generation.py +0 -740
- xformers-0.0.34.dev1102/xformers/_flash_attn/utils/library.py +0 -66
- xformers-0.0.34.dev1102/xformers/_flash_attn/utils/pretrained.py +0 -79
- xformers-0.0.34.dev1102/xformers/_flash_attn/utils/testing.py +0 -360
- xformers-0.0.34.dev1102/xformers/_flash_attn/utils/torch.py +0 -21
- xformers-0.0.34.dev1102/xformers/benchmarks/LRA/batch_fetch_results.py +0 -96
- xformers-0.0.34.dev1102/xformers/benchmarks/LRA/batch_submit.py +0 -49
- xformers-0.0.34.dev1102/xformers/benchmarks/LRA/code/dataset.py +0 -46
- xformers-0.0.34.dev1102/xformers/benchmarks/LRA/code/model_wrapper.py +0 -288
- xformers-0.0.34.dev1102/xformers/benchmarks/LRA/run_grid_search.py +0 -148
- xformers-0.0.34.dev1102/xformers/benchmarks/LRA/run_tasks.py +0 -302
- xformers-0.0.34.dev1102/xformers/benchmarks/LRA/run_with_submitit.py +0 -153
- xformers-0.0.34.dev1102/xformers/benchmarks/benchmark_nystrom_utils.py +0 -101
- xformers-0.0.34.dev1102/xformers/benchmarks/benchmark_revnet.py +0 -83
- xformers-0.0.34.dev1102/xformers/components/__init__.py +0 -32
- xformers-0.0.34.dev1102/xformers/components/attention/__init__.py +0 -106
- xformers-0.0.34.dev1102/xformers/components/attention/attention_mask.py +0 -143
- xformers-0.0.34.dev1102/xformers/components/attention/attention_patterns.py +0 -295
- xformers-0.0.34.dev1102/xformers/components/attention/base.py +0 -95
- xformers-0.0.34.dev1102/xformers/components/attention/core.py +0 -157
- xformers-0.0.34.dev1102/xformers/components/attention/fourier_mix.py +0 -35
- xformers-0.0.34.dev1102/xformers/components/attention/scaled_dot_product.py +0 -134
- xformers-0.0.34.dev1102/xformers/components/attention/sparsity_config.py +0 -812
- xformers-0.0.34.dev1102/xformers/components/attention/utils.py +0 -108
- xformers-0.0.34.dev1102/xformers/components/input_projection.py +0 -102
- xformers-0.0.34.dev1102/xformers/components/residual.py +0 -192
- xformers-0.0.34.dev1102/xformers/csrc/attention/attention.cpp +0 -38
- xformers-0.0.34.dev1102/xformers/csrc/nvcc_info.cu +0 -16
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/gemm.cu +0 -366
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/meta_utils.cu +0 -193
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/sparse24.cpp +0 -25
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/sparse24_apply.cu +0 -149
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/sparse24_apply_dense_output.cu +0 -239
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/sparse24_gemm_sm90.cu +0 -453
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/sparse24_largest_mask_2d.cu +0 -238
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/sparse24_metadata.h +0 -274
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/sparse24_pack.cu +0 -179
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/sparse24_pack_test.cu +0 -54
- xformers-0.0.34.dev1102/xformers/csrc/sparse24/sparseNM_dense.cu +0 -123
- xformers-0.0.34.dev1102/xformers/ops/fmha/_triton/__init__.py +0 -4
- xformers-0.0.34.dev1102/xformers/ops/fmha/_triton/splitk_kernels.py +0 -1273
- xformers-0.0.34.dev1102/xformers/ops/fmha/flash.py +0 -814
- xformers-0.0.34.dev1102/xformers/ops/fmha/flash3.py +0 -924
- xformers-0.0.34.dev1102/xformers/triton/__init__.py +0 -4
- xformers-0.0.34.dev1102/xformers.egg-info/PKG-INFO +0 -31
- xformers-0.0.34.dev1102/xformers.egg-info/SOURCES.txt +0 -9050
- xformers-0.0.34.dev1102/xformers.egg-info/requires.txt +0 -2
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/LICENSE +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/setup.cfg +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_checkpoint.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_fmha_flop_formula.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_fmha_merge_attentions.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_fwbw_overlap.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_indexing.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_mem_eff_attention.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_multiprocessing_utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_profiler.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_rmsnorm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_rope_padded.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_seqpar.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_sequence_parallel_fused_ops.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_sparse_tensors.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_splitk_reference.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_tiled_matmul.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_tree_attention.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_triton_varargs.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/tests/test_unbind.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/00_basic_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/00_basic_gemm/basic_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/01_cutlass_utilities/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/01_cutlass_utilities/cutlass_utilities.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/02_dump_reg_shmem/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/02_dump_reg_shmem/dump_reg_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/03_visualize_layout/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/03_visualize_layout/options.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/03_visualize_layout/register_layout.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/03_visualize_layout/register_layout.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/03_visualize_layout/visualize_layout.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/03_visualize_layout/visualize_layout.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/04_tile_iterator/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/04_tile_iterator/tile_iterator.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/05_batched_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/05_batched_gemm/batched_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/06_splitK_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/06_splitK_gemm/splitk_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/07_volta_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/07_volta_tensorop_gemm/volta_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/08_turing_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/08_turing_tensorop_gemm/turing_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/09_turing_tensorop_conv2dfprop/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/09_turing_tensorop_conv2dfprop/turing_tensorop_conv2dfprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/10_planar_complex/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/10_planar_complex/planar_complex.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/11_planar_complex_array/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/11_planar_complex_array/planar_complex_array.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/12_gemm_bias_relu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/12_gemm_bias_relu/gemm_bias_relu.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/b2b_conv2d_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/b2b_gemm_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/b2b_grouped_gemm_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/b2b_interleaved_conv2d_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/b2b_interleaved_gemm_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/device/b2b_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/device/b2b_implicit_gemm_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_f16_sm75_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_f16_sm75_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_f16_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_f16_sm80_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_s8_sm75_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_s8_sm75_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_s8_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_s8_sm80_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_f16_sm75_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_f16_sm75_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_f16_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_f16_sm80_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_grouped_f16_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_s8_sm75_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_s8_sm75_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_s8_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_s8_sm80_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/b2b_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/b2b_gemm_grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/b2b_implicit_gemm_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop_smem_accumulator_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop_smem_accumulator_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_gemm_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/kernel/grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/reference/device/tensor_scale_bias.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/test_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_implicit_gemm_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_implicit_gemm_multistage_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_implicit_gemm_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_implicit_gemm_pipelined_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_base_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_multistage_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_pipelined_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/default_b2b_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/default_b2b_mma_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/13_two_tensor_op_fusion/threadblock/grouped_threadblock_swizzle.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/14_ampere_tf32_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/14_ampere_tf32_tensorop_gemm/ampere_tf32_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/15_ampere_sparse_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/15_ampere_sparse_tensorop_gemm/ampere_sparse_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/15_ampere_sparse_tensorop_gemm/ampere_sparse_tensorop_gemm_universal.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/15_ampere_sparse_tensorop_gemm/ampere_sparse_tensorop_gemm_with_visitor.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/16_ampere_tensorop_conv2dfprop/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/16_ampere_tensorop_conv2dfprop/ampere_tensorop_conv2dfprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/17_fprop_per_channel_bias/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/17_fprop_per_channel_bias/fprop_per_channel_bias.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/18_ampere_fp64_tensorop_affine2_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/18_ampere_fp64_tensorop_affine2_gemm/ampere_fp64_tensorop_affine2_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/19_tensorop_canonical/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/19_tensorop_canonical/tensorop_canonical.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/20_simt_canonical/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/20_simt_canonical/simt_canonical.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/21_quaternion_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/21_quaternion_gemm/quaternion_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/22_quaternion_conv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/22_quaternion_conv/quaternion_conv.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/23_ampere_gemm_operand_reduction_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/23_ampere_gemm_operand_reduction_fusion/ampere_gemm_operand_reduction_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/24_gemm_grouped/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/24_gemm_grouped/gemm_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/25_ampere_fprop_mainloop_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/25_ampere_fprop_mainloop_fusion/ampere_3d_fprop_mainloop_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/25_ampere_fprop_mainloop_fusion/ampere_fprop_mainloop_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/26_ampere_wgrad_mainloop_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/26_ampere_wgrad_mainloop_fusion/ampere_wgrad_mainloop_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/27_ampere_3xtf32_fast_accurate_tensorop_gemm/27_ampere_3xtf32_fast_accurate_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/27_ampere_3xtf32_fast_accurate_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/28_ampere_3xtf32_fast_accurate_tensorop_fprop/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/28_ampere_3xtf32_fast_accurate_tensorop_fprop/ampere_3xtf32_fast_accurate_tensorop_fprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/29_ampere_3xtf32_fast_accurate_tensorop_complex_gemm/29_3xtf32_complex_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/29_ampere_3xtf32_fast_accurate_tensorop_complex_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/30_wgrad_split_k/30_wgrad_split_k.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/30_wgrad_split_k/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/31_basic_syrk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/31_basic_syrk/basic_syrk.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/32_basic_trmm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/32_basic_trmm/basic_trmm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/33_ampere_3xtf32_tensorop_symm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/33_ampere_3xtf32_tensorop_symm/ampere_3xtf32_tensorop_symm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/34_transposed_conv2d/34_transposed_conv2d.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/34_transposed_conv2d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/35_gemm_softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/35_gemm_softmax/gemm_softmax.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/35_gemm_softmax/gemm_with_epilogue_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/35_gemm_softmax/gemm_with_softmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/36_gather_scatter_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/36_gather_scatter_fusion/gather_scatter_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/37_gemm_layernorm_gemm_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/37_gemm_layernorm_gemm_fusion/gemm_layernorm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/37_gemm_layernorm_gemm_fusion/gemm_with_epilogue_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/37_gemm_layernorm_gemm_fusion/gemm_with_layernorm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/38_syr2k_grouped/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/38_syr2k_grouped/syr2k_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/39_gemm_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/39_gemm_permute/gemm_permute.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/39_gemm_permute/layouts.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/39_gemm_permute/permute_info.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/40_cutlass_py/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/40_cutlass_py/conv2d.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/40_cutlass_py/customizable/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/40_cutlass_py/customizable/conv2d.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/40_cutlass_py/customizable/gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/40_cutlass_py/customizable/gemm_grouped.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/40_cutlass_py/customizable/grouped_gemm_problem_size.csv +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/40_cutlass_py/gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/40_cutlass_py/gemm_grouped.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/debug_utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/default_fmha_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/epilogue/epilogue_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/epilogue/epilogue_rescale_output.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/epilogue/epilogue_thread_apply_logsumexp.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/fmha_backward_test.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/fmha_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/fmha_grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/fused_multi_head_attention_backward.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/fused_multihead_attention_fixed_seqlen.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/fused_multihead_attention_variable_seqlen.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/gemm/custom_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/gemm/custom_mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/gemm/custom_mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/gemm/custom_mma_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/gemm/find_default_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/gemm/mma_accum_lambda_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/gemm/mma_from_smem.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/gemm_kernel_utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/iterators/default_warp_iterator_from_smem.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/iterators/epilogue_predicated_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/iterators/make_residual_last.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/iterators/predicated_tile_access_iterator_residual_last.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/iterators/predicated_tile_iterator_residual_last.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/iterators/transpose_warp_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/iterators/warp_iterator_from_smem.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/kernel_backward.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/kernel_forward.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/piped_subprocess.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/41_fused_multi_head_attention/transform/tile_smem_loader.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/42_ampere_tensorop_group_conv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/42_ampere_tensorop_group_conv/ampere_tensorop_group_conv.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/43_ell_block_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/43_ell_block_sparse_gemm/ell_block_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/config.json +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/threadblock/default_bias_act_epilogue_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/threadblock/default_thread_map_tensor_op_for_fused_bias.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/threadblock/fused_bias_act_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/threadblock/output_tile_thread_map_for_fused_bias.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/warp/fused_bias_act_fragment_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/gemm/warp/mma_tensor_op_fragment_iterator_without_output_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_all_code.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_cmake.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_customized_epilogue.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_device.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_ir.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_kernel.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_sample.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_threadblock.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_turing_and_volta.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_verify.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/generate.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/helper.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/replace_fix_impl_header.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/leaky_bias.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/44_multi_gemm_ir_and_codegen/utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/device/dual_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/dual_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/dual_gemm_common.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/dual_gemm_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/kernel/dual_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/test_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/thread/left_silu_and_mul.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/threadblock/dual_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/threadblock/dual_mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/45_dual_gemm/threadblock/dual_mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/46_depthwise_simt_conv2dfprop/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/46_depthwise_simt_conv2dfprop/depthwise_simt_conv2dfprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/47_ampere_gemm_universal_streamk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/47_ampere_gemm_universal_streamk/ampere_gemm_universal_streamk.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/47_ampere_gemm_universal_streamk/ampere_gemm_universal_streamk_broadcast.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/48_hopper_warp_specialized_gemm/48_hopper_warp_specialized_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/48_hopper_warp_specialized_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/49_hopper_gemm_with_collective_builder/49_collective_builder.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/49_hopper_gemm_with_collective_builder/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/50_hopper_gemm_with_epilogue_swizzle/50_hopper_gemm_with_epilogue_swizzle.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/50_hopper_gemm_with_epilogue_swizzle/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/51_hopper_gett/51_hopper_gett.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/51_hopper_gett/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/51_hopper_gett/gett_kernel.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/52_hopper_gather_scatter_fusion/52_hopper_gather_scatter_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/52_hopper_gather_scatter_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/52_hopper_gather_scatter_fusion/gather_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/52_hopper_gather_scatter_fusion/gather_kernel.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/52_hopper_gather_scatter_fusion/scatter_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/53_hopper_gemm_permute/53_hopper_gemm_permute.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/53_hopper_gemm_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/53_hopper_gemm_permute/permute_kernel.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/53_hopper_gemm_permute/permute_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/54_hopper_fp8_warp_specialized_gemm/54_hopper_fp8_warp_specialized_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/54_hopper_fp8_warp_specialized_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/54_hopper_fp8_warp_specialized_gemm/hopper_fp8_commandline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/55_hopper_mixed_dtype_gemm/55_hopper_int4_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/55_hopper_mixed_dtype_gemm/55_hopper_int4_fp8_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/55_hopper_mixed_dtype_gemm/55_hopper_mixed_dtype_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/55_hopper_mixed_dtype_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/55_hopper_mixed_dtype_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/55_hopper_mixed_dtype_gemm/mixed_dtype_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/56_hopper_ptr_array_batched_gemm/56_hopper_ptr_array_batched_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/56_hopper_ptr_array_batched_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/57_hopper_grouped_gemm/57_hopper_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/57_hopper_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/58_ada_fp8_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/58_ada_fp8_gemm/ada_fp8_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/59_ampere_gather_scatter_conv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/59_ampere_gather_scatter_conv/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/59_ampere_gather_scatter_conv/ampere_conv_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/59_ampere_gather_scatter_conv/ampere_gather_scatter_conv.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/60_cutlass_import/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/60_cutlass_import/main.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/61_hopper_gemm_with_topk_and_softmax/61_hopper_gemm_with_topk_and_softmax.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/61_hopper_gemm_with_topk_and_softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/62_hopper_sparse_gemm/62_hopper_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/62_hopper_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/63_hopper_gemm_with_weight_prefetch/63_hopper_gemm_with_weight_prefetch.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/63_hopper_gemm_with_weight_prefetch/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/63_hopper_gemm_with_weight_prefetch/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/63_hopper_gemm_with_weight_prefetch/collective/builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/63_hopper_gemm_with_weight_prefetch/collective/dispatch_policy_extra.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/63_hopper_gemm_with_weight_prefetch/collective/sm90_mma_tma_gmma_ss_warpspecialized_with_prefetch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/63_hopper_gemm_with_weight_prefetch/gemm_with_weight_prefetch_commandline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/63_hopper_gemm_with_weight_prefetch/kernel/sm90_gemm_tma_warpspecialized_with_prefetch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/63_hopper_gemm_with_weight_prefetch/pipeline/prefetch_pipeline_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/64_ada_fp8_gemm_grouped/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/64_ada_fp8_gemm_grouped/ada_fp8_gemm_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/65_distributed_gemm/65_distributed_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/65_distributed_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/65_distributed_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/65_distributed_gemm/REQUIREMENTS.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling/67_hopper_fp8_warp_specialized_gemm_with_groupwise_scaling.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling/hopper_fp8_commandline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling_with_sparse_groups.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling/hopper_fp8_commandline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/69_hopper_int4_bf16_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/69_hopper_int4_fp8_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/69_hopper_mixed_dtype_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/grouped_mixed_dtype_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/70_blackwell_gemm/70_blackwell_fp16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/70_blackwell_gemm/70_blackwell_fp8_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/70_blackwell_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/71_blackwell_gemm_with_collective_builder/71_blackwell_gemm_with_collective_builder.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/71_blackwell_gemm_with_collective_builder/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/72_blackwell_narrow_precision_gemm/72a_blackwell_nvfp4_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/72_blackwell_narrow_precision_gemm/72b_blackwell_nvfp4_nvfp4_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/72_blackwell_narrow_precision_gemm/72c_blackwell_mixed_mxfp8_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/72_blackwell_narrow_precision_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/73_blackwell_gemm_preferred_cluster/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/73_blackwell_gemm_preferred_cluster/blackwell_gemm_preferred_cluster.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/74_blackwell_gemm_streamk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/74_blackwell_gemm_streamk/blackwell_gemm_streamk.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/75_blackwell_grouped_gemm/75_blackwell_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/75_blackwell_grouped_gemm/75_blackwell_grouped_gemm_block_scaled.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/75_blackwell_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/76_blackwell_conv/76_blackwell_conv_dgrad.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/76_blackwell_conv/76_blackwell_conv_fprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/76_blackwell_conv/76_blackwell_conv_wgrad.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/76_blackwell_conv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/77_blackwell_fmha.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/77_blackwell_fmha_bwd.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/77_blackwell_fmha_gen.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/77_blackwell_mla.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/77_blackwell_mla_fwd.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/fmha_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/fmha_fusion.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_fwd_epilogue_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_fwd_mainloop_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_gen_epilogue_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_gen_mainloop_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_load_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_load_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_mla_fwd_mainloop_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_mla_load_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/common/pipeline_mla.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/common/pow_2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/device/fmha.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/device/fmha_device_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/device/sm100_mla.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/fmha_causal_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/fmha_kernel_bwd_convert.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/fmha_kernel_bwd_sum_OdO.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/fmha_options.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/fmha_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_bwd_kernel_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_bwd_mla_kernel_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_fwd_kernel_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_gen_kernel_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_mla_reduction.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_mla_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/kernel/sm100_mla_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/reference/fmha_bwd_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/reference/fmha_fwd_gen_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/reference/fmha_fwd_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/reference/fmha_mla_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/77_blackwell_fmha/reference/reference_abs_error.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/78_blackwell_emulated_bf16x9_gemm/78_blackwell_emulated_bf16x9_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/78_blackwell_emulated_bf16x9_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/79_blackwell_geforce_gemm/79a_blackwell_geforce_nvfp4_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/79_blackwell_geforce_gemm/79b_blackwell_geforce_nvfp4_nvfp4_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/79_blackwell_geforce_gemm/79c_blackwell_geforce_mixed_mxfp8_mxfp6_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/79_blackwell_geforce_gemm/79d_blackwell_geforce_nvfp4_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/79_blackwell_geforce_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/80_blackwell_geforce_sparse_gemm/80a_blackwell_geforce_mxfp8_bf16_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/80_blackwell_geforce_sparse_gemm/80b_blackwell_geforce_nvfp4_nvfp4_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/80_blackwell_geforce_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/81_blackwell_gemm_blockwise/81_blackwell_gemm_blockwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/81_blackwell_gemm_blockwise/81_blackwell_gemm_groupwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/81_blackwell_gemm_blockwise/81_blackwell_grouped_gemm_blockwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/81_blackwell_gemm_blockwise/81_blackwell_grouped_gemm_groupwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/81_blackwell_gemm_blockwise/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/81_blackwell_gemm_blockwise/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/82_blackwell_distributed_gemm/82_blackwell_distributed_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/82_blackwell_distributed_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/82_blackwell_distributed_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/82_blackwell_distributed_gemm/REQUIREMENTS.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/83_blackwell_sparse_gemm/83_blackwell_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/83_blackwell_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/84_blackwell_narrow_precision_sparse_gemm/84a_blackwell_nvfp4_bf16_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/84_blackwell_narrow_precision_sparse_gemm/84b_blackwell_mixed_mxfp8_bf16_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/84_blackwell_narrow_precision_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/86_blackwell_mixed_dtype_gemm/86_blackwell_mixed_dtype.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/86_blackwell_mixed_dtype_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/86_blackwell_mixed_dtype_gemm/mixed_dtype_helper.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/87_blackwell_geforce_gemm_blockwise/87a_blackwell_geforce_fp8_bf16_gemm_blockwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/87_blackwell_geforce_gemm_blockwise/87b_blackwell_geforce_fp8_bf16_gemm_groupwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/87_blackwell_geforce_gemm_blockwise/87c_blackwell_geforce_fp8_bf16_grouped_gemm_groupwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/87_blackwell_geforce_gemm_blockwise/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/87_blackwell_geforce_gemm_blockwise/utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/88_hopper_fmha.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/collective/fmha_collective_bwd_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/collective/fmha_collective_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/collective/fmha_collective_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/collective/fmha_collective_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/collective/fmha_collective_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/collective/fmha_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/collective/fmha_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/collective/fmha_epilogue_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/collective/fmha_fusion.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/device/device_universal.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/device/fmha_device_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_bwd_convert.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_bwd_sum_OdO.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/kernel/fmha_options.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/kernel/fmha_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/reference/fmha_bwd_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/reference/fmha_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/88_hopper_fmha/reference/reference_abs_error.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/89_sm103_fp4_ultra_gemm/89_sm103_fp4_ultra_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/89_sm103_fp4_ultra_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/90_sm103_fp4_ultra_grouped_gemm/90_sm103_fp4_ultra_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/90_sm103_fp4_ultra_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/91_fp4_gemv/91_fp4_gemv.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/91_fp4_gemv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_fp4_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_fp4_regular.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_rcgrouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_regular.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/92_blackwell_moe_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/common/dist_gemm_helpers.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/common/gather_tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/common/helper.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/blackwell/01_mma_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/blackwell/02_mma_tma_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/blackwell/03_mma_tma_multicast_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/blackwell/04_mma_tma_2sm_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/blackwell/05_mma_tma_epi_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/blackwell/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/blackwell/example_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/hopper/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/hopper/wgmma_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/hopper/wgmma_tma_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/sgemm_1.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/sgemm_2.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/sgemm_sm70.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/sgemm_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/tiled_copy.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/cute/tutorial/tiled_copy_if.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/call_bypass_dlpack.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/call_from_jit.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/dynamic_smem_size.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/elementwise_add.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/elementwise_apply.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/flash_attention_v2.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/hstu_attention.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/inline_ptx.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/sgemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/smem_allocator.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/ampere/tensorop_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/blockwise_gemm/blockwise_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/blockwise_gemm/contiguous_grouped_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/blockwise_gemm/masked_grouped_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/dense_blockscaled_gemm_persistent.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/dense_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/dense_gemm_alpha_beta_persistent.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/dense_gemm_persistent.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/dense_gemm_software_pipeline.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/fmha.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/fmha_bwd.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/grouped_blockscaled_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/grouped_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/mamba2_ssd/mamba2_ssd.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/mamba2_ssd/mamba2_ssd_reference.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/mamba2_ssd/mamba2_ssd_tile_scheduler.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/mixed_input_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/mla.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/programmatic_dependent_launch.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/tutorial_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell/tutorial_gemm/fp16_gemm_0.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/blackwell_geforce/dense_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/cute/ffi/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/cute/ffi/jit_argument.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/cute/ffi/tensor.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/cute/torch_fake_tensor.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/hopper/dense_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/hopper/dense_gemm_persistent.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/hopper/fmha.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/async_pipeline.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/benchmark_autotune.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/composed_layout.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/cuda_graphs.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/cute_layout_algebra.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/data_types.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/elementwise_add.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/hello_world.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/images/cuda_graphs_image.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/print.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/tensor.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/notebooks/tensorssa.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/utils/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/utils/fmha_helpers.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/utils/sparse_utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/CuTeDSL/utils/test_sparse_utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/deprecated/00_basic_gemm.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/deprecated/01_epilogue.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/deprecated/02_pytorch_extension_grouped_gemm.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/deprecated/03_basic_conv2d.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/deprecated/04_epilogue_visitor.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/examples/python/deprecated/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/axpby.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/clear.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/cooperative_copy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/cooperative_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/copy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/fill.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/functional.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/prefer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/prefetch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/tensor_algorithms.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/tensor_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/algorithm/tuple_algorithms.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/cluster_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/cluster_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/config.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/copy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/copy_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/copy_sm100_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/copy_sm50.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/copy_sm75.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/copy_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/copy_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/copy_sm90_desc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/copy_sm90_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm100_desc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm100_umma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm120.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm120_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm61.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm70.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm75.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm89.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm90_desc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm90_gmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm90_gmma_ext.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm90_gmma_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/mma_sm90_gmma_sparse_ext.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/simd_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/tmem_allocator_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/arch/util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_atom.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm100_im2col.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm100_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm50.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm75.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm90_im2col.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm90_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/copy_traits_sm90_tma_swizzle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_atom.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm120.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm120_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm61.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm70.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm75.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm89.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm90_gmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm90_gmma_ext.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm90_gmma_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/mma_traits_sm90_gmma_sparse_ext.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/atom/partitioner.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/config.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/container/alignment.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/container/array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/container/array_aligned.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/container/array_subbyte.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/container/bit_field.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/container/cuda_types.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/container/tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/container/type_list.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/int_tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/layout_composed.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/numeric/arithmetic_tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/numeric/complex.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/numeric/int.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/numeric/integer_sequence.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/numeric/integral_constant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/numeric/integral_ratio.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/numeric/math.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/numeric/numeric_types.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/numeric/real.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/pointer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/pointer_base.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/pointer_flagged.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/pointer_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/pointer_swizzle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/stride.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/swizzle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/swizzle_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/tensor_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/tensor_zip.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/underscore.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/util/debug.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/util/print.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/util/print_latex.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/util/print_svg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/util/print_tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cute/util/type_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/aligned_buffer.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/arch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/barrier.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/cache_operation.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/config.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/grid_dependency_control.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/memory.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/memory_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/memory_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sm100.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sm50.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sm60.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sm61.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sm89.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sm90.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sparse_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/mma_sparse_sm89.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/reg_reconfig.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/simd.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/simd_sm60.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/simd_sm61.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/synclog.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/wmma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/wmma_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/wmma_sm72.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/arch/wmma_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/array_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/array_subbyte.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/barrier.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/bfloat16.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/blas3.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/blas3_types.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/block_striped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/cluster_launch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/constants.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/collective/builders/sm100_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/collective/builders/sm100_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/collective/builders/sm90_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/collective/builders/sm90_gmma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/collective/collective_builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/collective/collective_conv.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/collective/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/collective/sm100_implicit_gemm_umma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/collective/sm90_implicit_gemm_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/conv2d_problem_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/conv3d_problem_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/convnd_problem_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/device/conv_universal_adapter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/device/direct_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/device/implicit_gemm_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/device/implicit_gemm_convolution_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/dispatch_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/conv_universal.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d_dgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d_group_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d_wgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv2d_wgrad_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv3d_dgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv3d_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv3d_fprop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv3d_fprop_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_conv3d_wgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_deconv2d.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_deconv2d_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_deconv3d.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_deconv3d_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/default_depthwise_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/direct_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution_strided_dgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution_with_fused_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/sm100_implicit_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/kernel/sm90_implicit_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/thread/depthwise_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_dgrad_filter_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_dgrad_filter_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_dgrad_output_gradient_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_dgrad_output_gradient_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_activation_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_activation_tile_access_iterator_few_channels.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_activation_tile_access_iterator_fixed_channels.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_activation_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_filter_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_filter_tile_access_iterator_few_channels.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_filter_tile_access_iterator_fixed_channels.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_filter_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_wgrad_activation_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_wgrad_activation_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_wgrad_output_gradient_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv2d_wgrad_output_gradient_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_dgrad_filter_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_dgrad_filter_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_dgrad_output_gradient_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_dgrad_output_gradient_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_fprop_activation_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_fprop_activation_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_fprop_filter_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_fprop_filter_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_wgrad_activation_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_wgrad_activation_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_wgrad_output_gradient_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/conv3d_wgrad_output_gradient_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/depthwise_direct_conv_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_activation_tile_access_iterator_direct_conv_fixed_stride_dilation.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_activation_tile_access_iterator_direct_conv_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_direct_conv_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_filter_tile_access_iterator_direct_conv_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/depthwise_mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/depthwise_mma_core_with_lane_access_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/implicit_gemm_fprop_fusion_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/implicit_gemm_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/implicit_gemm_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/implicit_gemm_wgrad_fusion_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/predicated_scale_bias_vector_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/predicated_scale_bias_vector_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/threadblock/threadblock_swizzle.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/warp/mma_depthwise_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/warp/mma_depthwise_simt_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/conv/warp/scale_bias_relu_transform.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/core_io.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/cuda_host_adapter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/cutlass.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/blockwise_scale_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/cluster.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/collective/mixed_input_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/collective/sm103_kernel_type.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/collective.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/dependent_false.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/helper_macros.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/mainloop_fusion_helper_scale_factor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/mma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/sm100_blockscaled_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/sm100_mixed_dtype_blockwise_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/sm100_tmem_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/detail/sm103_blockscaled_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/device_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/builders/sm100_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/builders/sm103_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/builders/sm120_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/builders/sm120_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/builders/sm90_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/builders/sm90_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/collective_builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/collective_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/default_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/default_epilogue_array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/epilogue_tensor_broadcast.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/sm100_epilogue_array_nosmem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/sm100_epilogue_array_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/sm100_epilogue_nosmem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/sm100_epilogue_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/sm70_epilogue_vectorized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/sm70_epilogue_vectorized_array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/sm90_epilogue_array_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/sm90_epilogue_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/collective/sm90_epilogue_tma_warpspecialized_bias_elementwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/dispatch_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/callbacks.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/operations.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm100_callbacks_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm100_visitor_compute_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm100_visitor_store_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm120_callbacks_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm120_visitor_store_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm90_callbacks_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_compute_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_load_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_store_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_topk_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/activation.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/conversion_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_bias_elementwise.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_bias_relu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_clamp.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_dgelu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_drelu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_gelu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_generic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_generic_with_scaling.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_hardswish.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_leaky_relu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_relu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_relu0.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_residual_block.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_sigmoid.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_silu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_tensor_broadcast.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/linear_combination_with_elementwise.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/reduction_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/thread/scale_type.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_complex_tensor_op_blas3.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_direct_store.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_tensor_op_blas3.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_volta_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_thread_map_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_thread_map_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_thread_map_volta_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/default_thread_map_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/direct_store_epilogue_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_base_streamk.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_depthwise.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_direct_store.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_gemm_k_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_streamk_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_visitor_with_softmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_scaling_factor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_visitor_callbacks.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/epilogue_workspace.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/fusion/visitor_2x.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/fusion/visitor_compute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/fusion/visitor_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/fusion/visitor_store.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/fusion/visitors.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/interleaved_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/output_iterator_parameter.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/output_tile_thread_map.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_affine.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_affine_layout_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_blas3.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_conv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_direct_conv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_predicates.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_strided_dgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/shared_load_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/shared_load_iterator_mixed.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/threadblock/shared_load_iterator_pitch_linear.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/fragment_iterator_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/fragment_iterator_gaussian_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/fragment_iterator_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/fragment_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/fragment_iterator_volta_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/fragment_iterator_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/simt_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/tensor_op_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/tile_iterator_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/tile_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/tile_iterator_tensor_op_mixed.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/tile_iterator_volta_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/tile_iterator_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/volta_tensor_op_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/epilogue/warp/wmma_tensor_op_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/exmy_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/experimental/distributed/device/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/experimental/distributed/device/dist_gemm_universal_wrapper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/experimental/distributed/device/full_barrier.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/experimental/distributed/kernel/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/experimental/distributed/kernel/dist_gemm_kernel_wrapper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/experimental/distributed/kernel/full_barrier.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/experimental/distributed/schedules/dist_gemm_1d_schedules.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/experimental/distributed/schedules/dist_gemm_base_schedule.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/fast_math.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/float8.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/float_subbyte.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/floating_point_nvrtc.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/functional.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_9xBF16_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_blockscaled_mixed_tma_cpasync_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_blockscaled_sparse_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_blockscaled_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_blockwise_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_cpasync_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_mixed_input_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_mixed_tma_cpasync_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_pipeline_carveout.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_simt_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_sparse_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm100_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm103_blockscaled_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm120_blockscaled_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm120_blockscaled_sparse_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm120_blockwise_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm120_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm120_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm120_sparse_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm1xx_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm1xx_sparse_config.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm90_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm90_gmma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm90_sparse_config.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/builders/sm90_sparse_gmma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/collective_builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/collective_builder_decl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/collective_mma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/collective_mma_decl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/fp8_accumulation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_blockscaled_mma_array_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_blockscaled_mma_mixed_tma_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_blockscaled_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_blockscaled_sparse_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_array_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_array_warpspecialized_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_array_warpspecialized_emulated.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_array_warpspecialized_rcggemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_mixed_tma_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_warpspecialized_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_warpspecialized_emulated.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_mma_warpspecialized_mixed_input.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm100_sparse_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm103_blockscaled_mma_array_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm103_blockscaled_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm120_blockscaled_mma_array_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm120_blockscaled_mma_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm120_blockscaled_sparse_mma_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm120_mma_array_tma_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm120_mma_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm120_mma_tma_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm120_sparse_mma_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm70_mma_twostage.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm80_mma_array_multistage.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm80_mma_multistage.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_array_tma_gmma_rs_warpspecialized_mixed_input.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_array_tma_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_array_tma_gmma_ss_warpspecialized_fp8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_array_tma_gmma_ss_warpspecialized_fp8_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_multistage_gmma_rs_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_multistage_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_rs_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_rs_warpspecialized_mixed_input.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_ss.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_ss_warpspecialized_fp8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_ss_warpspecialized_fp8_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_sparse_mma_tma_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/collective/sm90_sparse_mma_tma_gmma_ss_warpspecialized_fp8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/base_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/default_gemm_configuration.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/ell_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_batched.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_layernorm_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_sparse.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_sparse_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_sparse_universal_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_sparse_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_sparse_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_splitk_parallel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_universal_adapter.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_universal_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_universal_streamk_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_universal_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_universal_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemm_with_k_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/gemv_blockscaled.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/rank_2k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/rank_2k_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/rank_k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/symm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/device/trmm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/dispatch_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/gemm_enumerated_types.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/group_array_problem_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_ell_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_grouped_per_group_scale.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_grouped_softmax_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_layernorm_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_planar_complex_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse_universal_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_splitk_parallel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_streamk_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_universal_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_with_k_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemm_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_gemv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_rank_2k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_rank_2k_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_rank_2k_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_rank_2k_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_rank_k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_rank_k_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_rank_k_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_symm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_symm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_symm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_trmm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_trmm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/default_trmm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/ell_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_batched.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_grouped_per_group_scale.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_grouped_softmax_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_layernorm_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_planar_complex_array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_sparse_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_sparse_universal_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_splitk_parallel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_streamk_with_fused_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_transpose_operands.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_universal.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_universal_decl.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_universal_streamk.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_universal_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_universal_with_visitor_streamk.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_with_fused_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemm_with_k_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemv_batched_strided.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/gemv_blockscaled.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/params_sparse_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/params_universal_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/rank_2k_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/rank_2k_grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/rank_2k_transpose_operands.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/rank_2k_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/rank_k_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_gemm_array_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_gemm_array_tma_warpspecialized_input_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_gemm_array_tma_warpspecialized_mma_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_gemm_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_gemm_mixed_tma_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_gemm_tma_warpspecialized_input_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_gemm_tma_warpspecialized_mixed_input_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_gemm_tma_warpspecialized_mma_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_sparse_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_static_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_tile_scheduler_group.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm100_tile_scheduler_stream_k.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm103_blockscaled_gemm_array_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm103_blockscaled_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm120_gemm_tma_warpspecialized_cooperative_asymmetric_dma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm70_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm70_gemm_array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_gemm_array_tma_warpspecialized_cooperative.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_gemm_array_tma_warpspecialized_pingpong.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_gemm_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_gemm_tma_warpspecialized_cooperative.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_gemm_tma_warpspecialized_pingpong.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_gemm_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_gemm_warpspecialized_cooperative.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_gemm_warpspecialized_pingpong.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_tile_scheduler_group.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sm90_tile_scheduler_stream_k.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sparse_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sparse_gemm_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/sparse_gemm_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/static_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/symm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/tile_scheduler_detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/tile_scheduler_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/kernel/trmm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/thread/mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/thread/mma_sm50.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/thread/mma_sm60.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/thread/mma_sm61.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_ell_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_gemv_core.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_core.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_core_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_core_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_core_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_core_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_core_sparse_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_core_with_access_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_core_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_core_wmma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_layernorm_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_planar_complex_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_planar_complex_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_softmax_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_mma_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_multistage_mma_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_multistage_mma_complex_core.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_multistage_mma_complex_core_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_multistage_trmm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_sparse_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/default_trmm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/ell_mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/ell_mma_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/gemv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/index_remat.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_blas3_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_layernorm_mainloop_fusion_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_planar_complex_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_planar_complex_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_planar_complex_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_singlestage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_softmax_mainloop_fusion_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_sparse_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_sparse_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/mma_with_reduction_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/threadblock_swizzle.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/threadblock/threadblock_swizzle_streamk.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/default_mma_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/default_mma_sparse_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/default_mma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/default_mma_tensor_op_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/default_mma_with_reduction_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/default_mma_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/layernorm_scale_bias_transform.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_complex_tensor_op_fast_f32.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_complex_tensor_op_tile_iterator_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_gaussian_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_gaussian_complex_tensor_op_tile_iterator_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_mixed_input_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_simt_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_simt_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_sparse_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_fast_f32.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_fragment_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator_sparse.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator_wmma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_tensor_op_wmma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/mma_with_reduction_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/scale_bias_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/softmax_scale_bias_transform.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm/warp/tile_iterator_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm_coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/gemm_coord.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/half.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/integer_subbyte.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/kernel_hardware_info.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/kernel_hardware_info.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/kernel_launch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/layout/layout.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/layout/matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/layout/permute.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/layout/pitch_linear.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/layout/tensor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/layout/tensor_op_multiplicand_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/layout/tensor_op_multiplicand_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/layout/tensor_op_multiplicand_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/layout/vector.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/matrix_coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/matrix_shape.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/numeric_conversion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/numeric_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/numeric_types.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/pipeline/pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/pipeline/sm100_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/pipeline/sm90_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/pitch_linear_coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/platform/platform.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/predicate_vector.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/quaternion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/real.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/device/reduce_split_k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/device/tensor_reduce.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/device/tensor_reduce_affine_contiguous.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/device/tensor_reduce_affine_strided.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/kernel/reduce_softmax_final.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/kernel/reduce_split_k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/kernel/tensor_reduce_affine_contiguous.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/kernel/tensor_reduce_affine_strided.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/thread/reduce.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/thread/reduction_operators.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/reduction/threadblock_swizzle.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/relatively_equal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/semaphore.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/subbyte_reference.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/tensor_coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/tensor_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/tensor_ref_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/tensor_view.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/tensor_view_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/tfloat32.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/thread/matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/trace.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/collective/sm90_wgmma_transpose.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/device/transform_universal_adapter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/kernel/filter_format_transformer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/kernel/sm90_sparse_gemm_compressor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/kernel/sparse_gemm_compressor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/pitch_linear_thread_map.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/thread/transpose.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/thread/unary_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/ell_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/ell_predicated_tile_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/ell_predicated_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_scale_bias_vector_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_scale_bias_vector_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_tile_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_tile_access_iterator_2dthreadtile.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_tile_access_iterator_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_tile_access_iterator_triangular_matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_tile_iterator_2dthreadtile.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_tile_iterator_triangular_matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/predicated_vector_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_scale_bias_vector_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator_pitch_linear.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator_pitch_linear_direct_conv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator_tensor_op_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator_pitch_linear.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator_pitch_linear_2dthreadtile.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator_tensor_op_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/threadblock/vector_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/transform/warp/vector_fragment_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/uint128.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/uint256.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/version.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/wmma_array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/include/cutlass/workspace.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/GPU_Clock.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/command_line.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/cublas_wrappers.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/debug.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_dump.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_groupnorm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_layernorm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_memory.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_nchw_to_nhwc.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_nhwc_padding.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_nhwc_pooling.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_nhwc_to_nchw.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_rmsnorm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/device_utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/distribution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/exceptions.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/gett_commandline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/helper_cuda.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/host_reorder.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/host_tensor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/host_tensor_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/host_uncompress.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/index_sequence.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/mixed_dtype_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/packed_stride.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/print_error.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/detail/inner_product.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/detail/linear_to_coordinate.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/gemm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/gemm_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/gett.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/kernel/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/kernel/tensor_elementwise.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/kernel/tensor_foreach.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/rank_2k_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/tensor_compare.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/tensor_fill.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/tensor_foreach.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/tensor_reduce.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/tensor_relu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/device/thread/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/conv.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/error_metrics.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/gemm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/gemm_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/gett.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/rank_2k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/rank_2k_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/rank_k_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/symm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/symm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_compare.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_compare.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_copy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_elementwise.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_fill.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_fill.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_foreach.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_norm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_reduce.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/tensor_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/trmm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/reference/host/trmm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/tensor_view_io.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/cutlass/tools/util/include/cutlass/util/type_traits.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.azuredevops/rocm-ci.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.clang-format +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.clang-tidy +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.git +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.github/CODEOWNERS +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.github/CONTRIBUTING.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.github/ISSUE_TEMPLATE/config.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.github/ISSUE_TEMPLATE/issue_report.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.github/ISSUE_TEMPLATE.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.github/PULL_REQUEST_TEMPLATE.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.github/dependabot.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.gitignore +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.pre-commit-config.yaml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/.readthedocs.yaml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/CHANGELOG.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/CITATION.cff +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/CONTRIBUTORS.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/Config.cmake.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/Dockerfile +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/Dockerfile.compiler +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/Jenkinsfile +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/LICENSE +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/TERMINOLOGY.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/01_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/01_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/01_gemm/gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/02_gemm_add_add_fastgelu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/02_gemm_add_add_fastgelu/gemm_add_add_fastgelu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/02_gemm_add_add_fastgelu/gemm_add_add_fastgelu_generic.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/02_gemm_add_add_fastgelu/gemm_add_fastgelu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/02_gemm_add_add_fastgelu/gemm_add_fastgelu_generic.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/02_gemm_add_add_fastgelu/gemm_fastgelu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/02_gemm_add_add_fastgelu/gemm_fastgelu_generic.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/03_gemm_layernorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/03_gemm_layernorm/gemm_add_add_layernorm_naive.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/03_gemm_layernorm/gemm_add_relu_add_layernorm_welford.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/04_contraction/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/04_contraction/contraction_bilinear_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/04_contraction/contraction_bilinear_fp64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/04_contraction/contraction_g1m2n3k1_add_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/04_contraction/contraction_scale_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/04_contraction/contraction_scale_fp64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/05_layernorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/05_layernorm/layernorm2d_bwd_data.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/05_layernorm/layernorm2d_bwd_gamma_beta.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/05_layernorm/layernorm2d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/05_layernorm/layernorm4d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/06_softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/06_softmax/softmax4d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/grouped_conv1d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/grouped_conv2d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/grouped_conv2d_fwd_ngchw.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/grouped_conv3d_fwd_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/grouped_conv3d_fwd_bf8_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/grouped_conv3d_fwd_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/07_grouped_convnd_fwd/grouped_conv3d_fwd_fp8_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/08_fused_attention/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/08_fused_attention/fused_attention.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/08_fused_attention/fused_attention_bias.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/09_quantization/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/09_quantization/conv2d_fwd_bias_relu_perchannel_quantization.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/09_quantization/conv2d_fwd_bias_relu_perlayer_quantization.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/09_quantization/conv2d_fwd_bias_tanh_perchannel_quantization.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/09_quantization/conv2d_fwd_bias_tanh_perlayer_quantization.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/09_quantization/conv2d_fwd_perchannel_quantization.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/09_quantization/conv2d_fwd_perlayer_quantization.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/09_quantization/gemm_quantization.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/10_grouped_convnd_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/10_grouped_convnd_bwd_data/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/10_grouped_convnd_bwd_data/grouped_conv2d_bwd_data.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/10_grouped_convnd_bwd_data/grouped_conv2d_bwd_data_ngchw.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/10_grouped_convnd_bwd_data/grouped_conv3d_bwd_data.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/10_grouped_convnd_bwd_data/grouped_conv3d_bwd_data_input_fp16_comp_bf8f8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/11_grouped_conv_bwd_weight/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/11_grouped_conv_bwd_weight/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/11_grouped_conv_bwd_weight/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/11_grouped_conv_bwd_weight/grouped_conv1d_bwd_weight_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/11_grouped_conv_bwd_weight/grouped_conv2d_bwd_weight_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/11_grouped_conv_bwd_weight/grouped_conv3d_bwd_weight_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/11_grouped_conv_bwd_weight/grouped_conv3d_bwd_weight_fp16_comp_bf8_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/11_grouped_conv_bwd_weight/grouped_conv3d_bwd_weight_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/12_elementwise_normalization/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/12_elementwise_normalization/elementwise_layernorm2d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/13_batchnorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/13_batchnorm/batchnorm_bwd_nhwc.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/13_batchnorm/batchnorm_fwd_nhwc.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/13_batchnorm/batchnorm_infer_nhwc.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/14_instance_id/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/14_instance_id/batchnorm_fwd_instance_id.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/15_convnd_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/15_convnd_bwd_data/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/15_convnd_bwd_data/conv3d_bwd_data_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/15_convnd_bwd_data/conv3d_bwd_data_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/15_gemm_add_multiply/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/15_gemm_add_multiply/gemm_add_multiply.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/15_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/15_reduce/reduce_nhwc_c.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/16_convnd_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/16_convnd_fwd/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/16_convnd_fwd/conv3d_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/16_convnd_fwd/conv3d_fwd_fp16_comp_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/16_convnd_fwd/conv3d_fwd_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/17_grouped_gemm_fastgelu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/17_grouped_gemm_fastgelu/grouped_gemm_fastgelu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/18_groupnorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/18_groupnorm/groupnorm_bwd_data.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/18_groupnorm/groupnorm_bwd_gamma_beta.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/18_groupnorm/groupnorm_swish_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/19_pool/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/19_pool/avg_pool3d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/19_pool/avg_pool3d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/19_pool/max_pool2d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/19_pool/max_pool2d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/20_splitk_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/20_splitk_gemm/splitK_gemm_fp16_f8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/21_grouped_gemm_bias/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/21_grouped_gemm_bias/grouped_gemm_fixed_nk_bias_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/22_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/22_grouped_gemm/grouped_gemm_fixed_nk_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/22_grouped_gemm/grouped_gemm_fixed_nk_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/22_grouped_gemm/grouped_gemm_fixed_nk_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/22_grouped_gemm/grouped_gemm_fixed_nk_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/22_im2col_col2im/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/22_im2col_col2im/column_to_image.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/22_im2col_col2im/image_to_column.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/23_elementwise_transpose/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/23_elementwise_transpose/elementwise_transpose_3d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_bwd_data_bilinear/grouped_conv_bwd_data_bilinear_residual_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_bwd_data_scale/grouped_conv_bwd_data_scale_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_bwd_weight_bilinear/grouped_conv_bwd_weight_bilinear_residual_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_bwd_weight_scale/grouped_conv_bwd_weight_scale_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_bilinear/grouped_conv_fwd_bilinear_residual_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convinvscale/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convinvscale/conv3d_fwd_convinvscale_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale/conv3d_fwd_convscale_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale/conv3d_fwd_convscale_bf8_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale/conv3d_fwd_convscale_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale/conv3d_fwd_convscale_fp8_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale_add/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale_add/conv3d_fwd_convscale_add_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale_reduce/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale_reduce/conv3d_fwd_convscale_amax_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale_reduce/conv3d_fwd_convscale_relu_amax_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale_relu/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_convscale_relu/conv3d_fwd_convscale_relu_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scale/grouped_conv_fwd_scale_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_ab/grouped_conv_fwd_scaleadd_ab.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_ab/grouped_conv_fwd_scaleadd_ab_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_ab/grouped_conv_fwd_scaleadd_ab_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_ab/grouped_conv_fwd_scaleadd_ab_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_ab/grouped_conv_fwd_scaleadd_ab_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_scaleadd_relu/grouped_conv_fwd_scaleadd_scaleadd_relu.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_scaleadd_relu/grouped_conv_fwd_scaleadd_scaleadd_relu_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_scaleadd_relu/grouped_conv_fwd_scaleadd_scaleadd_relu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_scaleadd_relu/grouped_conv_fwd_scaleadd_scaleadd_relu_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/24_grouped_conv_activation/grouped_convnd_fwd_scaleadd_scaleadd_relu/grouped_conv_fwd_scaleadd_scaleadd_relu_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/25_wrapper/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/25_wrapper/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/25_wrapper/tensor_transform_using_wrapper.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/25_wrapper/wrapper_basic_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/25_wrapper/wrapper_img2col.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/25_wrapper/wrapper_optimized_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/30_gemm_bf16Aint8B/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/30_gemm_bf16Aint8B/gemm_bias_fastgelu_xdl_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/30_gemm_bf16Aint8B/gemm_bias_xdl_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/30_gemm_bf16Aint8B/gemm_xdl_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/30_gemm_bf16Aint8B/gemm_xdl_gelu_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/30_gemm_bf16Aint8B/gemm_xdl_multiply_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/31_grouped_gemm_bf16Aint8B/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/31_grouped_gemm_bf16Aint8B/grouped_gemm_bias_fastgelu_xdl_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/31_grouped_gemm_bf16Aint8B/grouped_gemm_fastgelu_xdl_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/31_grouped_gemm_bf16Aint8B/grouped_gemm_multiply_bias_fastgelu_xdl_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/31_grouped_gemm_bf16Aint8B/grouped_gemm_multiply_xdl_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/31_grouped_gemm_bf16Aint8B/grouped_gemm_xdl_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/32_gemm_mx/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/32_gemm_mx/gemm_mx_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/client_example/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/Analyzers.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/ClangTidy.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/CppCheck.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/DoxygenDoc.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/Embed.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/EnableCompilerWarnings.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/ShardInstantiation.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/TargetFlags.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/call_shard.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/getopt.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/gtest.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/cmake/instantiate_shard.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/driver/main.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/device_batched_gemm_softmax_gemm/operation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/device_batched_gemm_softmax_gemm/problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/device_gemm_multiple_d/operation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/device_gemm_multiple_d/problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/device_gemm_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/device_grouped_conv_fwd_multiple_d/conv_fwd_op.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/device_grouped_conv_fwd_multiple_d/conv_fwd_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/headers.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/operation/gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/stringutils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/types.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/include/ck/host/utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/src/device_batched_gemm_softmax_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/src/device_batched_gemm_softmax_gemm_operation_xdl_cshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/src/device_gemm_multiple_d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/src/device_gemm_multiple_d_operation_xdl_cshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/src/device_grouped_conv_fwd_multiple_abd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/src/device_grouped_conv_fwd_multiple_abd_operation_xdl_cshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/src/headers.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/src/types.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/src/utils.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/batched_gemm_softmax_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/gemm_multiple_d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/grouped_conv_fwd_multiple_d_v1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/grouped_conv_fwd_multiple_d_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/grouped_conv_fwd_multiple_d_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/grouped_conv_fwd_multiple_d_v4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/include/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/include/test.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/include/rtc/compile_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/include/rtc/filesystem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/include/rtc/hip.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/include/rtc/kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/include/rtc/manage_ptr.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/include/rtc/tmp_dir.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/src/compile_kernel.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/src/hip.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/src/kernel.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/codegen/test/rtc/src/tmp_dir.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/dev-requirements.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/Contributors_Guide.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/conceptual/Composable-Kernel-math.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/conceptual/Composable-Kernel-structure.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/conf.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/data/ck_component.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/data/ck_layer.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/doxygen/Doxyfile +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/index.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/install/Composable-Kernel-Docker.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/install/Composable-Kernel-install.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/install/Composable-Kernel-prerequisites.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/license.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/reference/Composable-Kernel-wrapper.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/reference/Composable_Kernel_custom_types.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/reference/Composable_Kernel_supported_scalar_types.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/reference/Composable_Kernel_vector_utilities.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/refs.bib +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/sphinx/_toc.yml.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/sphinx/requirements.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/sphinx/requirements.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/docs/tutorial/Composable-Kernel-examples.rst +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_dl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_dl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_dl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_dl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_dpp_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_bf16_pk_i4_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_bf16_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_fp16_fp8_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_fp16_pk_i4_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_fp16_pk_i4_v3_b_scale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_fp16_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_fp8_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_wmma_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_bf16_pk_i4_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_bf16_streamk_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_bf16_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp16_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp16_fp8_streamk_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp16_fp8_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp16_pk_i4_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp16_pk_i4_v3_b_scale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp16_streamk_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp16_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp16_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp8_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp8_pk_i4_bpreshuffle_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp8_pk_i4_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp8_streamk_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_fp8_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_lds_direct_load_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_lds_direct_load_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_skip_b_lds_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_streamk.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/gemm_xdl_wavelet_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/run_gemm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/run_gemm_example_streamk.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/run_gemm_example_streamk_v2.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/01_gemm/run_gemm_example_v2.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/02_gemm_bilinear/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/02_gemm_bilinear/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/02_gemm_bilinear/gemm_bilinear_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/02_gemm_bilinear/gemm_bilinear_wmma_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/02_gemm_bilinear/gemm_bilinear_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/03_gemm_bias_relu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/03_gemm_bias_relu/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/03_gemm_bias_relu/gemm_bias_relu_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/gemm_add_add_fastgelu_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/gemm_add_add_fastgelu_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/gemm_add_add_fastgelu_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/gemm_add_add_fastgelu_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/gemm_add_add_fastgelu_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/gemm_add_add_fastgelu_xdl_lds_direct_load_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/04_gemm_add_add_fastgelu/run_gemm_add_add_fastgelu_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_dl_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_dl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_dl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_dl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_bf8_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_fp16_comp_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_fp64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_fp8_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/convnd_fwd_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/run_convnd_fwd_dl_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/09_convnd_fwd/run_convnd_fwd_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/10_convnd_fwd_multiple_d_multiple_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/10_convnd_fwd_multiple_d_multiple_reduce/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/10_convnd_fwd_multiple_d_multiple_reduce/convnd_fwd_max_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/10_convnd_fwd_multiple_d_multiple_reduce/convnd_fwd_max_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/10_convnd_fwd_multiple_d_multiple_reduce/convnd_fwd_max_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/10_convnd_fwd_multiple_d_multiple_reduce/convnd_fwd_max_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/10_convnd_fwd_multiple_d_multiple_reduce/convnd_fwd_max_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/10_convnd_fwd_multiple_d_multiple_reduce/run_convnd_fwd_max_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/reduce_blockwise.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/reduce_blockwise_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/reduce_blockwise_two_call.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/reduce_example_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/reduce_multiblock_atomic_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/reduce_multiblock_atomic_add_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/reduce_threadwise_multi_d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/12_reduce/reduce_threadwise_multi_d_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/13_pool2d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/13_pool2d_fwd/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/13_pool2d_fwd/pool2d_fwd_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/13_pool2d_fwd/pool2d_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/13_pool2d_fwd/pool2d_fwd_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/14_gemm_quantization/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/14_gemm_quantization/gemm_dl_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/14_gemm_quantization/gemm_xdl_bias_relu_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/14_gemm_quantization/gemm_xdl_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_multiple_d_dl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_multiple_d_splitk_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_multiple_d_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_xdl_fixed_nk_bias_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_xdl_fixed_nk_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_xdl_fixed_nk_fp16_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/grouped_gemm_xdl_splitk_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/15_grouped_gemm/run_grouped_gemm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_add_add_mean_meansquare_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_add_addsquare_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_max_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_max_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_max_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_max_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_max_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_mean_meansquare_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_mean_meansquare_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_mean_meansquare_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/16_gemm_multi_d_multi_reduces/gemm_reduce_xdl_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/17_convnd_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/17_convnd_bwd_data/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/17_convnd_bwd_data/convnd_bwd_data_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/17_convnd_bwd_data/convnd_bwd_data_dl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/17_convnd_bwd_data/convnd_bwd_data_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/18_batched_gemm_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/18_batched_gemm_reduce/batched_gemm_reduce_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/19_binary_elementwise/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/19_binary_elementwise/broadcast_add_2d_amn_bn.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/19_binary_elementwise/broadcast_add_3d_am_bmnk.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/19_binary_elementwise/elementwise_add_1d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/19_binary_elementwise/elementwise_add_4d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/grouped_conv_bwd_weight_dl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/grouped_conv_bwd_weight_v3_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/grouped_conv_bwd_weight_v3_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/grouped_conv_bwd_weight_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/grouped_conv_bwd_weight_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/grouped_conv_bwd_weight_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/grouped_conv_bwd_weight_xdl_fp16_comp_bf8_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/20_grouped_conv_bwd_weight/run_grouped_conv_bwd_weight_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/21_gemm_layernorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/21_gemm_layernorm/gemm_bias_relu_add_layernorm_xdl_naive_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/21_gemm_layernorm/gemm_bias_relu_add_layernorm_xdl_welford_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/21_gemm_layernorm/gemm_layernorm_xdl_naive_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/21_gemm_layernorm/gemm_xdl_layernorm_naive_single_kernel_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/22_cgemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/22_cgemm/cgemm_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/22_cgemm/cgemm_xdl_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/22_cgemm/cgemm_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/22_cgemm/cgemm_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/22_cgemm/cgemm_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/22_cgemm/cgemm_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/23_softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/23_softmax/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/23_softmax/softmax_blockwise.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/batched_gemm_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/batched_gemm_xdl_bf16_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/batched_gemm_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/batched_gemm_xdl_fp16int4_b_scale_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/batched_gemm_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/batched_gemm_xdl_fp8_rowwise_v3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/batched_gemm_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/batched_gemm_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/run_batched_gemm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/run_batched_gemm_example_fp16int4_b_scale.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/24_batched_gemm/run_batched_gemm_example_rowwise.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/25_gemm_bias_e_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/25_gemm_bias_e_permute/gemm_bias_e_permute_g1m2n3k1_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/25_gemm_bias_e_permute/gemm_bias_e_permute_g1m3n2k1_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/common_instances.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_bilinear_xdl_bf16_compute_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_bilinear_xdl_fp16_compute_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_bilinear_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_bilinear_xdl_fp32_compute_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_bilinear_xdl_fp32_compute_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_bilinear_xdl_fp64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_bilinear_xdl_fp64_compute_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_scale_xdl_bf16_compute_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_scale_xdl_fp16_compute_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_scale_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_scale_xdl_fp32_compute_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_scale_xdl_fp32_compute_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_scale_xdl_fp64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/contraction_scale_xdl_fp64_compute_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/run_contraction_bilinear_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/26_contraction/run_contraction_scale_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/27_layernorm2d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/27_layernorm2d_fwd/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/27_layernorm2d_fwd/layernorm2d_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/27_layernorm2d_fwd/layernorm2d_fwd_splitk_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/27_layernorm2d_fwd/run_layernorm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/28_grouped_gemm_bias_e_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/28_grouped_gemm_bias_e_permute/grouped_gemm_bias_e_permute_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/29_batched_gemm_bias_e_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/29_batched_gemm_bias_e_permute/batched_gemm_bias_e_permute_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/29_batched_gemm_bias_e_permute/batched_gemm_bias_e_permute_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/common_wmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/grouped_conv_fwd_bias_relu_add_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/grouped_conv_fwd_bias_relu_add_wmma_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/grouped_conv_fwd_bias_relu_add_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/grouped_conv_fwd_bias_relu_add_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/grouped_conv_fwd_bias_relu_add_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/grouped_conv_fwd_bias_relu_add_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/grouped_conv_fwd_bias_relu_add_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/grouped_conv_fwd_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/run_grouped_conv_fwd_bias_relu_add_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/run_grouped_conv_fwd_bias_relu_add_wmma_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/30_grouped_conv_fwd_multiple_d/run_grouped_conv_fwd_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/31_batched_gemm_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/31_batched_gemm_gemm/batched_gemm_gemm_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/31_batched_gemm_gemm/batched_gemm_gemm_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/31_batched_gemm_gemm/batched_gemm_gemm_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/31_batched_gemm_gemm/batched_gemm_gemm_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/31_batched_gemm_gemm/batched_gemm_gemm_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/31_batched_gemm_gemm/run_batched_gemm_gemm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/batched_gemm_lower_triangle_scale_softmax_gemm_permute_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/batched_gemm_lower_triangle_scale_softmax_gemm_permute_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/batched_gemm_scale_softmax_gemm_permute_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/batched_gemm_scale_softmax_gemm_permute_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/batched_gemm_scale_softmax_gemm_permute_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/batched_gemm_scale_softmax_gemm_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/batched_gemm_scale_softmax_gemm_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/cross_attention_forward_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/grouped_gemm_lower_triangle_scale_softmax_gemm_permute_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/grouped_gemm_scale_softmax_gemm_permute_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/grouped_query_attention_forward_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/multi_query_attention_forward_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/run_batched_gemm_scale_softmax_gemm.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/run_batched_gemm_scale_softmax_gemm_permute.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/run_batched_gemm_scale_softmax_gemm_permute_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/run_cross_attention_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/run_grouped_gemm_scale_softmax_gemm_permute.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/run_grouped_query_attention_forward_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/run_multi_query_attention_forward_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/run_self_attention_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/32_batched_gemm_scale_softmax_gemm/self_attention_forward_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/33_multiple_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/33_multiple_reduce/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/33_multiple_reduce/dual_reduce_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/33_multiple_reduce/dual_reduce_multiblock.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/33_multiple_reduce/dual_reduce_threadwise.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/34_batchnorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/34_batchnorm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/34_batchnorm/batchnorm_backward_nhwc.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/34_batchnorm/batchnorm_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/34_batchnorm/batchnorm_forward_inferring_nhwc.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/34_batchnorm/batchnorm_forward_training_nhwc.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/34_batchnorm/batchnorm_forward_training_nhwc_obsolete.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/34_batchnorm/batchnorm_infer_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/gemm_xdl_splitk_reduce_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/gemm_xdl_splitk_reduce_bf16A_i8B.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/gemm_xdl_splitk_reduce_multi_d_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/gemm_xdl_splitk_reduce_multi_d_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/run_gemm_splitk_reduce_multi_d_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/run_splitK_gemm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/splitK_gemm_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/splitK_gemm_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/splitK_gemm_xdl_fp16_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/splitK_gemm_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/splitK_gemm_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/splitK_gemm_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/35_splitK_gemm/splitK_gemm_xdl_lds_direct_load_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/36_sparse_embedding/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/36_sparse_embedding/sparse_embedding3_forward_layernorm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/37_batched_gemm_add_add_relu_gemm_add/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/37_batched_gemm_add_add_relu_gemm_add/batched_gemm_add_add_relu_gemm_add_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/38_grouped_conv_bwd_data_multiple_d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/38_grouped_conv_bwd_data_multiple_d/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/38_grouped_conv_bwd_data_multiple_d/grouped_conv_bwd_data_bias_relu_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/38_grouped_conv_bwd_data_multiple_d/grouped_conv_bwd_data_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/38_grouped_conv_bwd_data_multiple_d/grouped_conv_bwd_data_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/38_grouped_conv_bwd_data_multiple_d/grouped_conv_bwd_data_xdl_fp16_comp_bf8_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/38_grouped_conv_bwd_data_multiple_d/run_grouped_conv_bwd_data_bias_relu_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/38_grouped_conv_bwd_data_multiple_d/run_grouped_conv_bwd_data_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/39_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/39_permute/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/39_permute/permute_1xHxW_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/39_permute/permute_HxWx4_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/39_permute/permute_NxHxW_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/39_permute/run_permute_bundle_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/39_permute/run_permute_element_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_dl_bias_relu_perchannel_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_dl_bias_relu_perlayer_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_dl_bias_tanh_perchannel_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_dl_bias_tanh_perlayer_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_dl_perchannel_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_dl_perlayer_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_xdl_bias_relu_perchannel_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_xdl_bias_relu_perlayer_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_xdl_perchannel_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/conv2d_fwd_xdl_perlayer_quantization_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/run_conv2d_fwd_bias_perchannel_quantization_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/run_conv2d_fwd_bias_perlayer_quantization_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/run_conv2d_fwd_perchannel_quantization_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/40_conv2d_fwd_quantization/run_conv2d_fwd_perlayer_quantization_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/41_grouped_conv_conv_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/41_grouped_conv_conv_fwd/grouped_conv_conv_fwd_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/41_grouped_conv_conv_fwd/grouped_conv_conv_fwd_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/41_grouped_conv_conv_fwd/grouped_conv_conv_fwd_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/41_grouped_conv_conv_fwd/grouped_conv_conv_fwd_xdl_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/41_grouped_conv_conv_fwd/grouped_conv_conv_fwd_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/41_grouped_conv_conv_fwd/run_grouped_conv_conv_fwd_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/42_groupnorm_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/42_groupnorm_fwd/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/42_groupnorm_fwd/groupnorm_fwd_sigmoid_mul_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/42_groupnorm_fwd/groupnorm_fwd_splitk_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/42_groupnorm_fwd/groupnorm_fwd_swish_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/42_groupnorm_fwd/run_groupnorm_fwd_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/43_splitk_gemm_bias_e_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/43_splitk_gemm_bias_e_permute/splitk_gemm_bias_e_permute_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/43_splitk_gemm_bias_e_permute/splitk_gemm_bias_e_permute_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/44_elementwise_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/44_elementwise_permute/elementwise_binary_4D_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/44_elementwise_permute/elementwise_permute_4D_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/44_elementwise_permute/elementwise_permute_4D_fp16_col.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/44_elementwise_permute/elementwise_permute_4D_fp16_row.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/44_elementwise_permute/elementwise_permute_4D_fp32_col.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/44_elementwise_permute/elementwise_permute_4D_fp32_row.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/44_elementwise_permute/elementwise_scale_permute_amax_2D_fp16_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/44_elementwise_permute/elementwise_trinary_4D_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/45_elementwise_normalization/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/45_elementwise_normalization/elementwise_layernorm_blockwise.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/46_gemm_add_multiply/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/46_gemm_add_multiply/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/46_gemm_add_multiply/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/46_gemm_add_multiply/gemm_add_multiply_dl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/46_gemm_add_multiply/gemm_add_multiply_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/46_gemm_add_multiply/run_gemm_add_multiply_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/47_gemm_bias_softmax_gemm_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/47_gemm_bias_softmax_gemm_permute/gemm_bias_softmax_gemm_permute_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/48_pool3d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/48_pool3d_fwd/pool3d_fwd_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/48_pool3d_fwd/pool3d_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/49_maxpool2d_bwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/49_maxpool2d_bwd/maxpool2d_bwd_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/49_maxpool2d_bwd/maxpool2d_bwd_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/49_maxpool2d_bwd/maxpool2d_bwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/49_maxpool2d_bwd/maxpool2d_bwd_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/50_put_element/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/50_put_element/put_element_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/51_avgpool3d_bwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/51_avgpool3d_bwd/avgpool3d_bwd_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/51_avgpool3d_bwd/avgpool3d_bwd_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/51_avgpool3d_bwd/avgpool3d_bwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/51_avgpool3d_bwd/avgpool3d_bwd_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/52_im2col_col2im/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/52_im2col_col2im/column_to_image_f32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/52_im2col_col2im/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/52_im2col_col2im/image_to_column_f32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/53_layernorm2d_bwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/53_layernorm2d_bwd/layernorm2d_bwd_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/54_groupnorm_bwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/54_groupnorm_bwd/groupnorm_bwd_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/59_grouped_gemm_multi_ABD/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/59_grouped_gemm_multi_ABD/grouped_gemm_multi_abd_xdl_fixed_nk_bias_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/59_grouped_gemm_multi_ABD/grouped_gemm_multi_abd_xdl_fixed_nk_bias_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/60_gemm_multi_ABD/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/60_gemm_multi_ABD/gemm_multi_ABD_xdl_bias_fastgelu_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/60_gemm_multi_ABD/gemm_multi_ABD_xdl_fastgelu_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/60_gemm_multi_ABD/gemm_multi_ABD_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/60_gemm_multi_ABD/gemm_multi_ABD_xdl_multiply_bias_fastgelu_bf16_i8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/61_contraction_multi_ABD/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/61_contraction_multi_ABD/contraction_multi_ABD_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/61_contraction_multi_ABD/contraction_multi_ABD_xdl_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/binary/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/binary/convnd_bwd_data_xdl_bilinear_residual_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/binary/convnd_bwd_weight_xdl_bilinear_residual_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/binary/convnd_fwd_xdl_bilinear_residual_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convinvscale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convinvscale/convnd_fwd_convinvscale_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convinvscale/convnd_fwd_xdl_convinvscale_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convinvscale/run_convnd_fwd_convinvscale_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convnd_fwd_xdl_scaleadd_scaleadd_relu_bcasted_bias_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convnd_fwd_xdl_scaleadd_scaleadd_relu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale/convnd_fwd_convscale_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale/convnd_fwd_xdl_convscale_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale/convnd_fwd_xdl_convscale_bf8_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale/convnd_fwd_xdl_convscale_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale/convnd_fwd_xdl_convscale_fp8_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale/run_convnd_fwd_convscale_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_add/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_add/convnd_fwd_convscale_add_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_add/convnd_fwd_xdl_convscale_add_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_add/run_convnd_fwd_convscale_add_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_reduce/convnd_fwd_convscale_reduce_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_reduce/convnd_fwd_xdl_convscale_amax_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_reduce/convnd_fwd_xdl_convscale_relu_amax_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_reduce/run_convnd_fwd_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_relu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_relu/convnd_fwd_convscale_relu_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_relu/convnd_fwd_xdl_convscale_relu_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/convscale_relu/run_convnd_fwd_convscale_relu_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_activ_dynamic_unary_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_abs_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_clippedrelu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_elu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_leakyrelu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_logistic_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_passthrough_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_pow_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_relu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_sigmoid_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_softrelu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_swish_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/dynamic_unary/convnd_fwd_xdl_dynamic_tanh_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/multi_AB/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/multi_AB/conv_fwd_xdl_scaleadd_ab_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/multi_AB/conv_fwd_xdl_scaleadd_ab_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/multi_AB/conv_fwd_xdl_scaleadd_ab_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/multi_AB/conv_fwd_xdl_scaleadd_ab_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/multi_AB/convnd_fwd_activ_multi_ab_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/run_convnd_activ_dynamic_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/run_convnd_activ_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_activ_unary_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_abs_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_clippedrelu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_elu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_leakyrelu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_logistic_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_passthrough_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_pow_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_relu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_sigmoid_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_softrelu_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_swish_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/62_convnd_activ/unary/convnd_fwd_xdl_tanh_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/63_layernorm4d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/63_layernorm4d_fwd/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/63_layernorm4d_fwd/layernorm4d_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/63_layernorm4d_fwd/layernorm4d_fwd_splitk_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/63_layernorm4d_fwd/run_layernorm4d_fwd_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/64_fpAintB_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/64_fpAintB_gemm/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/64_fpAintB_gemm/fp16int8_gemm_wmma.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/64_fpAintB_gemm/run_gemm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/gemm_add_add_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/gemm_multiply_multiply_xdl_fp16_bpreshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/gemm_multiply_multiply_xdl_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/gemm_multiply_multiply_xdl_fp8_ab_scale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/gemm_multiply_multiply_xdl_fp8_blockscale_bpreshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/gemm_multiply_multiply_xdl_fp8_bpreshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/gemm_multiply_multiply_xdl_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/moe_gemm1_xdl_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/moe_gemm1_xdl_fp8_blockscale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/moe_gemm1_xdl_pk_i4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/moe_gemm2_xdl_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/moe_gemm2_xdl_fp8_blockscale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/65_gemm_multiply_multiply/moe_gemm2_xdl_pk_i4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/66_complex_contraction_bilinear/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/66_complex_contraction_bilinear/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/66_complex_contraction_bilinear/common_instances.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/66_complex_contraction_bilinear/complex_contraction_bilinear_xdl_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/66_complex_contraction_bilinear/complex_contraction_bilinear_xdl_fp64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/66_complex_contraction_bilinear/run_complex_contraction_bilinear_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/gemm_mx_bf6.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/gemm_mx_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/gemm_mx_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/gemm_mx_fp4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/gemm_mx_fp4_bpreshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/gemm_mx_fp6.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/gemm_mx_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/gemm_mx_fp8_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/moe_gemm1_xdl_mx_fp4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/moe_gemm1_xdl_mx_fp4_bns.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/moe_gemm1_xdl_mx_fp4_bpreshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/moe_gemm2_xdl_mx_fp4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/moe_gemm2_xdl_mx_fp4_bns.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/67_gemm_microscaling/moe_gemm2_xdl_mx_fp4_bpreshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/bias.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/cmake_config.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/cpp_symbol_map.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/ops/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/ops/fmha_batch_prefill.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/ops/fmha_bwd.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/ops/fmha_fwd.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/ops/fmha_fwd_appendkv.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/ops/fmha_fwd_splitkv.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/codegen/ops/fmha_pagedkv_prefill.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/fmha_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/fmha_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/fmha_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/fmha_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/generate.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/mask.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/misc/gamc.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/rotary.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/script/benchmark_bwd.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/script/benchmark_fwd.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/script/run_full_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/script/smoke_test_bwd.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/script/smoke_test_fwd.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/01_fmha/utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/02_layernorm2d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/02_layernorm2d/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/02_layernorm2d/generate.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/02_layernorm2d/layernorm2d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/02_layernorm2d/layernorm2d_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/02_layernorm2d/misc/dquant.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/02_layernorm2d/misc/pnorm.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/02_layernorm2d/script/perf_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/02_layernorm2d/script/smoke_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/gemm_basic.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/gemm_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/gemm_weight_preshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/run_gemm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/benchmark_basic_bf16.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/benchmark_basic_bf8.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/benchmark_basic_fp16.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/benchmark_basic_fp8.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/benchmark_mem_pipeline_bf16.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/benchmark_mem_pipeline_bf8.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/benchmark_mem_pipeline_fp16.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/benchmark_mem_pipeline_fp8.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/run_full_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/smoke_test_basic.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/script/smoke_test_mem_pipeline.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/03_gemm/universal_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/04_img2col/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/04_img2col/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/04_img2col/image_to_column.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/04_img2col/image_to_column.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/05_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/05_reduce/reduce.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/05_reduce/reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/06_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/06_permute/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/06_permute/alternative_impl/matrix_core_swizzle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/06_permute/alternative_impl/matrix_core_swizzle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/06_permute/alternative_impl/matrix_core_swizzle_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/06_permute/permute.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/06_permute/permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/06_permute/script/smoke_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/09_topk_softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/09_topk_softmax/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/09_topk_softmax/script/smoke_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/09_topk_softmax/topk_softmax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/09_topk_softmax/topk_softmax_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/09_topk_softmax/topk_softmax_api.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/10_rmsnorm2d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/10_rmsnorm2d/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/10_rmsnorm2d/example_rmsnorm2d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/10_rmsnorm2d/generate.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/10_rmsnorm2d/rmsnorm2d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/10_rmsnorm2d/rmsnorm2d_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/10_rmsnorm2d/script/perf_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/10_rmsnorm2d/script/smoke_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/add_rmsnorm2d_rdquant_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/add_rmsnorm2d_rdquant_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/example_add_rmsnorm2d_rdquant_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n8192_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n8192_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n8192_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n8192_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/script/perf_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/11_add_rmsnorm2d_rdquant/script/smoke_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/example_smoothquant.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n4096_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_bf16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n4096_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fp16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_fwd_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/instances/smoothquant_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/script/perf_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/script/smoke_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/smoothquant.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/12_smoothquant/smoothquant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/13_moe_sorting/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/13_moe_sorting/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/13_moe_sorting/moe_sorting.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/13_moe_sorting/moe_sorting_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/13_moe_sorting/moe_sorting_api.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/13_moe_sorting/script/smoke_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n4096_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_bf16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n4096_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fp16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_fwd_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/instances/moe_smoothquant_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/misc/moe-sm.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/moe_smoothquant.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/moe_smoothquant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/script/perf_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/14_moe_smoothquant/script/smoke_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/fused_moe.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/fused_moegemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/fused_moesorting.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/instances/fused_moe_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/instances/fused_moegemm_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/instances/fused_moegemm_api_internal.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/instances/fused_moegemm_api_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/instances/fused_moegemm_bf16_m32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/instances/fused_moegemm_fp16_m32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/instances/fused_moesorting_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/main.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/misc/moe-0.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/misc/moe-1.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/misc/moe-2.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/15_fused_moe/misc/moe-3.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/16_batched_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/16_batched_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/16_batched_gemm/batched_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/16_batched_gemm/batched_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/16_batched_gemm/run_batched_gemm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/17_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/17_grouped_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/17_grouped_gemm/grouped_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/17_grouped_gemm/grouped_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/17_grouped_gemm/grouped_gemm_tileloop.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/17_grouped_gemm/run_grouped_gemm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/18_flatmm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/18_flatmm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/18_flatmm/flatmm_basic.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/18_flatmm/flatmm_basic.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/18_flatmm/run_flatmm_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/18_flatmm/script/smoke_test_basic.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/19_gemm_multi_d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/19_gemm_multi_d/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/19_gemm_multi_d/gemm_multi_d_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/19_gemm_multi_d/gemm_multi_d_fp16.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/19_gemm_multi_d/run_gemm_multi_d_fp16_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/19_gemm_multi_d/utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/20_grouped_convolution/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/20_grouped_convolution/grouped_convolution_backward_weight.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/20_grouped_convolution/grouped_convolution_forward.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/20_grouped_convolution/grouped_convolution_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/20_grouped_convolution/run_grouped_convolution_bwd_weight_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/20_grouped_convolution/run_grouped_convolution_fwd_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/21_elementwise/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/21_elementwise/elementwise_example.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/21_elementwise/elementwise_example_add_4d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/21_elementwise/elementwise_example_transpose.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/21_elementwise/elementwise_example_unary.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/35_batched_transpose/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/35_batched_transpose/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/35_batched_transpose/batched_transpose_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/35_batched_transpose/batched_transpose_example.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/35_batched_transpose/batched_transpose_example.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/35_batched_transpose/script/perf_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/35_batched_transpose/script/run_full_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/35_batched_transpose/script/smoke_test.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/38_block_scale_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/38_block_scale_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/38_block_scale_gemm/gemm_aquant_basic.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/38_block_scale_gemm/gemm_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/38_block_scale_gemm/run_gemm_aquant_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/example/ck_tile/remod.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/ck.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/config.h.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/filesystem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/host_utility/device_prop.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/host_utility/flush_cache.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/host_utility/hip_check_error.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/host_utility/io.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/host_utility/kernel_launch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/host_utility/stream_utility.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/algorithm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/check_err.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/conv_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/convolution_host_tensor_descriptor_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/convolution_parameter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/device_memory.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/fill.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/host_common_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/host_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/host_tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/host_tensor_generator.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/iterator.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/literals.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/numeric.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/ranges.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/library/utility/thread.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/problem_transform/transform_forward_convolution3d_into_gemm_v4r4r4_ndhwc_kzyxc_ndhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/stream_config.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor/static_tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_description/cluster_descriptor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_description/multi_index_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_description/multi_index_transform_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_description/tensor_adaptor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_description/tensor_descriptor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_description/tensor_descriptor_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_description/tensor_space_filling_curve.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_dl_v2r3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_dlops_v2r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_dlops_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_dpp.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_mx_pipeline_xdlops_base.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_wmma_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_wmmaops.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_wmmaops_base.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_wmmaops_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_wmmaops_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_ab_scale_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_dequant_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_dequant_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_gufusion_dequant_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_gufusion_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_gufusion_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_mx_moe_gufusion_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_mx_moe_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_mx_moe_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_preshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_b_scale_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_base.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_blockscale_b_preshuffle_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_blockscale_b_preshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_blockscale_b_preshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_moe_blockscale_b_preshuffle_gufusion_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_moe_blockscale_b_preshuffle_gufusion_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_moe_blockscale_b_preshuffle_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_moe_blockscale_b_preshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_moe_blockscale_b_preshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_mx_bpreshuffle_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_mx_moe_gufusion_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_mx_moe_nbs_gufusion_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_mx_moe_nbs_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_mx_moe_nbs_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_mx_moe_nbs_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_mx_moe_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_mx_moe_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_mx_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v1_ab_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v1_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v1_mx.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v2_ab_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v2_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v3_ab_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v3_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v3_mx.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v3_mx_bpreshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v4.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v4_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_pipeline_xdlops_v5.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_smfmac_xdlops.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_wmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_xdlops.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_gemm_xdlops_skip_b_lds.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_tensor_slice_transfer_v5r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/blockwise_welford.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/reduction_functions_blockwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_direct_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_gather_direct_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v4r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v4r1_dequant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v4r1_gather.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v4r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v6r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v6r1r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v6r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v6r3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v7.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v7r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v7r3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/block/thread_group_tensor_slice_transfer_v7r3_scatter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/conv_tensor_rearrange_op.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/convolution_backward_data_specialization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/convolution_backward_weight_specialization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/convolution_forward_specialization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_avgpool_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_base.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batched_contraction_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batched_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batched_gemm_e_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batched_gemm_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batched_gemm_multi_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batched_gemm_multiple_d_gemm_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batched_gemm_softmax_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batched_gemm_softmax_gemm_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batchnorm_backward.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batchnorm_forward.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_batchnorm_infer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_cgemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_contraction_multiple_abd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_contraction_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_conv_bwd_data.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_conv_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_conv_fwd_bias_activation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_conv_fwd_bias_activation_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_conv_tensor_rearrange.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_elementwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_elementwise_normalization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_elementwise_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_bias_e_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_dequantB.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_multiple_abd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_multiple_d_ab_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_multiple_d_layernorm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_multiple_d_multiple_r.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_mx.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_splitk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_streamk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_streamk_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_gemm_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_contraction_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_conv_bwd_data_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_conv_bwd_weight.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_conv_bwd_weight_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_conv_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_conv_fwd_multiple_abd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_conv_fwd_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_gemm_fixed_nk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_gemm_multi_abd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_gemm_multi_abd_fixed_nk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_gemm_softmax_gemm_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_gemm_splitk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_grouped_gemm_tile_loop.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_max_pool_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_multiple_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_normalization_bwd_data.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_normalization_bwd_gamma_beta.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_normalization_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_pool_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_put_element.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_reduce_multi_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/device_splitk_contraction_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/gemm_specialization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/codegen_device_grouped_conv_fwd_multiple_abd_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_avgpool2d_bwd_nhwc_nhwc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_avgpool3d_bwd_ndhwc_ndhwc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_contraction_multiple_d_wmma_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_contraction_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_e_permute_xdl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_gemm_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_multi_d_xdl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_multiple_d_dl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_multiple_d_gemm_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_multiple_d_xdl_cshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_reduce_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_softmax_gemm_permute_wmma_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_softmax_gemm_permute_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_softmax_gemm_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_wmma_cshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_xdl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batched_gemm_xdl_fpAintB_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batchnorm_backward_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batchnorm_forward_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_batchnorm_forward_impl_obsolete.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_cgemm_4gemm_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_column_to_image_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_contraction_multiple_abd_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_contraction_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_contraction_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_conv2d_backward_weight_xdl_c_shuffle_nhwc_kyxc_nhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_conv2d_bwd_data_xdl_nhwc_kyxc_nhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_conv2d_fwd_xdl_c_shuffle_bias_activation_add_nhwc_kyxc_nhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_conv2d_fwd_xdl_c_shuffle_bias_activation_nhwc_kyxc_nhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_conv2d_fwd_xdl_c_shuffle_nhwc_kyxc_nhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_conv2d_fwd_xdl_nhwc_kyxc_nhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_conv3d_fwd_naive_ndhwc_kzyxc_ndhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_conv3d_fwd_xdl_ndhwc_kzyxc_ndhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_convnd_bwd_data_nwc_kxc_nwk_dl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_convnd_bwd_data_nwc_kxc_nwk_xdl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_elementwise_dynamic_vector_dims_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_elementwise_normalization_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_elementwise_scale_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_fpAintB_gemm_wmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_bias_add_reduce_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_dl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_dpp.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_abd_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_dl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_layernorm_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_multiple_r_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_wmma_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_xdl_cshuffle_lds_direct_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_xdl_cshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_xdl_cshuffle_v3_ab_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_xdl_cshuffle_v3_b_preshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_multiple_d_xdl_cshuffle_v3_blockscale_bpreshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_reduce_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_wmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_wmma_cshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_wmma_cshuffle_v3_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_wmma_cshuffle_v3_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_cshuffle_lds_direct_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_cshuffle_streamk_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_cshuffle_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_cshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_cshuffle_v3_b_preshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_cshuffle_v3_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_cshuffle_v3_mx.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_cshuffle_v3r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_layernorm_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_skip_b_lds.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_splitk_c_shuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_splitk_c_shuffle_lds_direct_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_streamk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_gemm_xdl_waveletmodel_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_contraction_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_bwd_data_multiple_d_wmma_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_bwd_data_multiple_d_xdl_cshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_bwd_weight_dl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_bwd_weight_explicit_xdl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_bwd_weight_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_bwd_weight_two_stage_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_bwd_weight_wmma_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_bwd_weight_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_bwd_weight_xdl_cshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_fwd_dl_multiple_d_nhwc_kyxc_nhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_fwd_dl_nhwc_kyxc_nhwk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_fwd_multiple_abd_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_fwd_multiple_abd_xdl_cshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_fwd_multiple_d_multiple_r.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_fwd_multiple_d_multiple_r_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_fwd_multiple_d_wmma_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_fwd_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_fwd_multiple_d_xdl_large_tensor_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_conv_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_gemm_multi_abd_xdl_fixed_nk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_gemm_multiple_d_dl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_gemm_multiple_d_splitk_xdl_cshuffle_two_stage.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_gemm_multiple_d_xdl_cshuffle_tile_loop.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_gemm_softmax_gemm_permute_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_gemm_xdl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_gemm_xdl_fixed_nk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_gemm_xdl_splitk_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_grouped_query_attention_forward_wmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_image_to_column_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_max_pool_bwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_moe_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_moe_gemm_blockscale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_moe_mx_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_moe_mx_gemm_bns.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_moe_mx_gemm_bpreshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_multi_query_attention_forward_wmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_multiple_reduce_multiblock.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_multiple_reduce_threadwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_normalization_bwd_data_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_normalization_bwd_gamma_beta_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_normalization_fwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_normalization_fwd_splitk_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_permute_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_pool2d_fwd_nhwc_nhwc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_pool3d_fwd_ndhwc_ndhwc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_put_element_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_reduce_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_reduce_multiblock.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_reduce_threadwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_reduce_threadwise_multi_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_softmax_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_sparse_embeddings_forward_layernorm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/impl/device_splitk_contraction_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/masking_specialization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/matrix_padder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/reduction_operator_mapping.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/tensor_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/tensor_specialization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/device/welford_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/element/binary_element_wise_operation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/element/combined_element_wise_operation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/element/element_wise_operation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/element/quantization_operation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/element/unary_element_wise_operation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/batchnorm_multiblock/gridwise_multiblock_batchnorm_forward.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/batchnorm_multiblock/gridwise_multiblock_reduce_second_half_batchnorm_backward_final.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/batchnorm_multiblock/gridwise_multiblock_welford_first_half.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/batchnorm_multiblock/gridwise_multiblock_welford_second_half_batchnorm_forward_final_obsolete.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/batchnorm_multiblock/gridwise_multiblock_welford_second_half_multiblock_reduce_first_half.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/block_to_ctile_map.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gemm_layernorm/gridwise_gemm_multiple_d_welford_first_half_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gemm_layernorm/gridwise_welford_second_half_layernorm2d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_2d_multiple_reduction_multiblock.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_2d_multiple_reduction_threadwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_2d_reduction_multiblock.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_2d_reduction_threadwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_2d_reduction_threadwise_multi_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_batched_gemm_gemm_xdl_cshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_batched_gemm_multiple_d_gemm_multiple_d_xdl_cshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_batched_gemm_multiple_d_softmax_gemm_xdl_cshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_batched_gemm_softmax_gemm_wmma_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_batched_gemm_softmax_gemm_xdl_cshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_batchnorm_backward_blockwise_welford.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_batchnorm_forward_blockwise_welford.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_elementwise_1d_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_elementwise_2d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_elementwise_layernorm_welford_variance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_fpAintB_gemm_wmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_bias_add_reduce_xdl_cshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_dl_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_dl_v1r3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_dpp.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_multiple_abd_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_multiple_d_multiple_r_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_multiple_d_wmma_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_multiple_d_xdl_cshuffle_lds_direct_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_multiple_d_xdl_splitk_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_pipeline_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_pipeline_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_pipeline_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_pipeline_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_pipeline_v4_direct_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_reduce_xdl_cshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_split_k_multiple_d_xdl_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_split_k_multiple_d_xdl_cshuffle_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_waveletmodel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_wmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_wmma_cshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_wmma_cshuffle_v3_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_wmma_cshuffle_v3_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_conv_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_streamk_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3_b_preshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3_multi_abd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3_multi_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3_multi_d_ab_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3_multi_d_b_preshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3_multi_d_blockscale_b_preshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3_mx.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_cshuffle_v3_mx_bpreshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_layernorm_cshuffle_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdl_waveletmodel_cshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_bwd_weight.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_skip_b_lds_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_splitk_lds_direct_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_streamk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_v2r3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_v2r4.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_v2r4r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_v3r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_v3r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_gemm_xdlops_v3r3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_moe_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_moe_gemm_blockscale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_moe_mx_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_moe_mx_gemm_bns.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_moe_mx_gemm_bpreshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_put_element_1d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_set_buffer_value.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_set_multiple_buffer_value.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_sparse_embeddings_forward_layernorm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_sparse_embeddings_forward_layernorm_builtins.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/gridwise_tensor_rearrange.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/normalization/gridwise_normalization_bwd_data.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/normalization/gridwise_normalization_bwd_gamma_beta.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/normalization/gridwise_normalization_naive_variance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/normalization/gridwise_normalization_selector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/normalization/gridwise_normalization_splitk_1st.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/normalization/gridwise_normalization_splitk_2nd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/grid/normalization/gridwise_normalization_welford_variance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/reduction_functions_threadwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_contraction_dl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_gemm_dlops_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_set.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v3r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v3r1_dequant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v3r1_gather.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v3r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v4r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v5r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v6r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v6r1r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v6r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v6r3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v7.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v7r2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v7r3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_tensor_slice_transfer_v7r3_scatter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/thread/threadwise_welford.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/warp/dpp_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/warp/smfmac_xdlops_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/warp/wmma_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/gpu/warp/xdlops_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/operator_transform/transform_contraction_to_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/operator_transform/transform_contraction_to_gemm_arraybase.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/operator_transform/transform_conv_bwd_data_to_gemm_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/operator_transform/transform_conv_bwd_weight_to_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/operator_transform/transform_conv_bwd_weight_to_gemm_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/operator_transform/transform_conv_fwd_to_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/tensor_operation/operator_transform/transform_conv_ngchw_to_nhwgc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_address_space.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_buffer_addressing.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_buffer_addressing_builtins.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_ck_fp8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_gemm_dpp.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_inline_asm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_lds.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_smfmac.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_wave_read_first_lane.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_wmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/amd_xdlops.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/array_multi_index.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/blkgemmpipe_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/c_style_pointer_cast.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/common_header.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/container_element_picker.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/container_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/data_type.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/debug.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/dtype_fp64.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/dtype_vector.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/dynamic_buffer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/e8m0.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/enable_if.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/env.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/f8_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/filter_tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/flush_icache.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/functional.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/functional2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/functional3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/functional4.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/generic_memory_space_atomic.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/get_id.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/get_shift.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/ignore.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/inner_product.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/inner_product_dpp8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/integral_constant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/is_detected.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/is_known_at_compile_time.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/loop_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/magic_division.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/math.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/math_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/multi_index.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/mxf4_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/mxf6_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/mxf8_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/mxfp_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/number.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/numeric_limits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/numeric_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/random_gen.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/reduction_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/reduction_enums.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/reduction_functions_accumulate.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/reduction_operator.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/scaled_type_convert.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/sequence.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/sequence_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/span.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/static_buffer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/statically_indexed_array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/statically_indexed_array_multi_index.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/synchronization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/thread_group.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/transpose_vectors.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/tuple_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/type.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/type_convert.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/workgroup_barrier.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/utility/workgroup_synchronization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/version.h.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/wrapper/layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/wrapper/operations/copy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/wrapper/operations/gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/wrapper/tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/wrapper/traits/blockwise_gemm_xdl_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/wrapper/utils/kernel_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/wrapper/utils/layout_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/wrapper/utils/tensor_partition.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck/wrapper/utils/tensor_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/algorithm/cluster_descriptor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/algorithm/coordinate_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/algorithm/indexing_adaptor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/algorithm/space_filling_curve.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/algorithm/static_encoding_pattern.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/arch/amd_buffer_addressing.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/arch/amd_buffer_addressing_builtins.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/arch/amd_transpose_load_encoding.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/arch/arch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/arch/generic_memory_space_atomic.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/arch/utility.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/arch/workgroup_barrier.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/config.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/container_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/map.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/meta_data_buffer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/multi_index.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/sequence.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/span.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/statically_indexed_array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/thread_buffer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/container/tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/bfloat16.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/float8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/half.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/int8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/integer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/integral_constant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/math.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/mxfp_convert.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/null_type.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/numeric.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/pk_fp4.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/pk_int4.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/type_convert.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/numeric/vector_type.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/buffer_view.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/load_tile.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/load_tile_transpose.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/null_tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/null_tile_window.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/shuffle_tile.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/slice_tile.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/static_distributed_tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/store_tile.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/sweep_tile.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tensor_adaptor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tensor_adaptor_coordinate.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tensor_coordinate.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tensor_descriptor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tensor_view.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tile_distribution.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tile_distribution_encoding.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tile_elementwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tile_scatter_gather.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tile_window.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tile_window_base.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tile_window_linear.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/tile_window_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/transpose_tile.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/tensor/update_tile.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/bit_cast.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/debug.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/env.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/functional.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/functional_with_tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/ignore.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/literals.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/magic_div.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/philox_rand.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/random.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/reduce_operator.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/static_counter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/to_sequence.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/transpose_vectors.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/type_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core/utility/unary_element_function.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/core.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/arg_parser.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/check_err.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/concat.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/convolution_host_tensor_descriptor_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/convolution_parameter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/device_memory.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/device_prop.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/fill.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/flush_icache.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/hip_check_error.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/host_tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/joinable_thread.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/kernel_launch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/ranges.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_batched_dropout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_batched_elementwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_batched_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_batched_masking.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_batched_rotary_position_embedding.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_batched_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_batched_transpose.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_elementwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_fused_moe.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_grouped_conv_bwd_weight.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_grouped_conv_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_im2col.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_layernorm2d_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_moe_sorting.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_rmsnorm2d_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_rowwise_quantization2d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_topk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/reference/reference_transpose.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/rotating_buffers.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/stream_config.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/stream_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host/timer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/host.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/add_rmsnorm2d_rdquant/kernel/add_rmsnorm2d_rdquant_fwd_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/add_rmsnorm2d_rdquant/pipeline/add_rmsnorm2d_rdquant_fwd_pipeline_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/add_rmsnorm2d_rdquant/pipeline/add_rmsnorm2d_rdquant_fwd_pipeline_one_pass.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/add_rmsnorm2d_rdquant/pipeline/add_rmsnorm2d_rdquant_fwd_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/add_rmsnorm2d_rdquant/pipeline/add_rmsnorm2d_rdquant_fwd_pipeline_three_pass.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/add_rmsnorm2d_rdquant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/batched_transpose/kernel/batched_transpose_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/batched_transpose/pipeline/batched_transpose_common_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/batched_transpose/pipeline/batched_transpose_lds_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/batched_transpose/pipeline/batched_transpose_lds_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/batched_transpose/pipeline/batched_transpose_lds_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/batched_transpose/pipeline/batched_transpose_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/batched_transpose/pipeline/batched_transpose_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/batched_transpose/pipeline/batched_transpose_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/batched_transpose.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/common/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/common/generic_2d_block_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/common/tensor_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/common/utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/elementwise/binary_elementwise_operation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/elementwise/kernel/elementwise_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/elementwise/pipeline/elementwise_pipeline_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/elementwise/pipeline/elementwise_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/elementwise/pipeline/elementwise_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/elementwise/unary_element_wise_operation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/elementwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/epilogue/cshuffle_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/epilogue/default_2d_and_dynamic_quant_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/epilogue/default_2d_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/epilogue/dynamic_quant_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/block_flatmm_asmem_bsmem_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/block_flatmm_asmem_bsmem_creg_v1_custom_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/flatmm_32x512x128_1x4x1_16x16x32.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/flatmm_sn_32x128x512_1x4x1_16x16x32.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/flatmm_sn_32x128x512_1x4x1_16x16x32_itl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/flatmm_uk_config.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/uk/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/uk/flatmm_sn_uk_gfx9_32x128x512_1x4x1_16x16x16.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/uk/flatmm_sn_uk_gfx9_32x128x512_1x4x1_16x16x16_itl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/block/uk/flatmm_uk_gfx9_32x512x128_1x1x1_16x16x16.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/kernel/flatmm_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/pipeline/flatmm_pipeline_agmem_bgmem_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/pipeline/flatmm_pipeline_agmem_bgmem_creg_v1_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm/pipeline/tile_flatmm_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/flatmm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/block/block_attention_bias_enum.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/block/block_dropout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/block/block_masking.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/block/block_position_encoding.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/block/block_rotary_embedding.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/block/page_block_navigator.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/block/variants.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/kernel/fmha_batch_prefill_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/kernel/fmha_bwd_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/kernel/fmha_fwd_appendkv_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/kernel/fmha_fwd_appendkv_tile_partitioner.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/kernel/fmha_fwd_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/kernel/fmha_fwd_pagedkv_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/kernel/fmha_fwd_splitkv_combine_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/kernel/fmha_fwd_splitkv_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_batch_prefill_pipeline_qr_ks_vs_async.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_batch_prefill_pipeline_qr_ks_vs_async_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_bwd_convert_dq.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_bwd_dot_do_o.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_bwd_dq_dk_dv_pipeline_kr_ktr_vr.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_bwd_dq_dk_dv_pipeline_kr_ktr_vr_iglp.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_bwd_pipeline_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_bwd_pipeline_enum.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_bwd_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_appendkv_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_appendkv_pipeline_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_pagedkv_pipeline_qr_ks_vs.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_pagedkv_pipeline_qr_ks_vs_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_splitkv_combine_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_splitkv_combine_pipeline_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_splitkv_pipeline_nwarp_sshuffle_qr_ks_vs.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_splitkv_pipeline_nwarp_sshuffle_qr_ks_vs_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_splitkv_pipeline_qr_ks_vs.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_fwd_splitkv_pipeline_qr_ks_vs_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_enum.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qr_ks_vs.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qr_ks_vs_async.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qr_ks_vs_async_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qr_ks_vs_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qr_ks_vs_fp8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qr_ks_vs_whole_k_prefetch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qr_ks_vs_whole_k_prefetch_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qs_ks_vs.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qs_ks_vs_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/block_fmha_pipeline_qx_ks_vs_custom_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/tile_fmha_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha/pipeline/tile_fmha_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fmha.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/kernel/fused_moegemm_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/kernel/fused_moegemm_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/kernel/fused_moegemm_tile_partitioner.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/kernel/moe_sorting_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/kernel/moe_sorting_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/pipeline/fused_moegemm_pipeline_flatmm_ex.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/pipeline/fused_moegemm_pipeline_flatmm_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/pipeline/fused_moegemm_pipeline_flatmm_uk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/pipeline/fused_moegemm_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/pipeline/fused_moegemm_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/pipeline/moe_sorting_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe/pipeline/moe_sorting_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/fused_moe.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bgmem_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bgmem_creg_v1_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_breg_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_breg_creg_v1_custom_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_breg_creg_v1_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bsmem_creg_one_warp_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bsmem_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bsmem_creg_v1_custom_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bsmem_creg_v1_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bsmem_creg_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bsmem_creg_v2_custom_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bsmem_creg_v2_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_areg_bsmem_creg_v2r1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_asmem_breg_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_asmem_breg_creg_v1_custom_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_asmem_breg_creg_v1_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_asmem_bsmem_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_asmem_bsmem_creg_v1_custom_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_asmem_bsmem_creg_v1_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_gemm_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_universal_gemm_as_bs_cr.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_wp_asmem_bsmem_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/block/block_wp_asmem_bsmem_creg_v1_custom_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/kernel/batched_gemm_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/kernel/gemm_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/kernel/gemm_multi_d_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/kernel/gemm_tile_partitioner.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/kernel/grouped_gemm_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/kernel/universal_gemm_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_ag_bg_cr_base.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_ag_bg_cr_comp_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_ag_bg_cr_comp_v4.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_ag_bg_cr_comp_v4_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_ag_bg_cr_comp_v5.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_ag_bg_cr_comp_v5_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_ag_bg_cr_mem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_ag_bg_cr_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_agmem_bgmem_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_agmem_bgmem_creg_v1_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_agmem_bgmem_creg_v2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_agmem_bgmem_creg_v2_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/gemm_universal_pipeline_ag_bg_cr_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/tile_gemm_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/tile_gemm_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/wp_pipeline_agmem_bgmem_creg_v1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/pipeline/wp_pipeline_agmem_bgmem_creg_v1_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/warp/warp_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/warp/warp_gemm_attribute_mfma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/warp/warp_gemm_attribute_mfma_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/warp/warp_gemm_attribute_smfmac.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/warp/warp_gemm_attribute_smfmac_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/warp/warp_gemm_dispatcher.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/warp/warp_gemm_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm/warp/warp_gemm_smfmac_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm_group_quant/block/block_universal_gemm_as_aquant_bs_cr.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm_group_quant/kernel/gemm_aquant_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm_group_quant/pipeline/gemm_aquant_pipeline_ag_bg_cr_base.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm_group_quant/pipeline/gemm_aquant_pipeline_ag_bg_cr_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm_group_quant/pipeline/gemm_aquant_pipeline_ag_bg_cr_v3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm_group_quant/pipeline/gemm_aquant_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm_group_quant/pipeline/gemm_group_quant_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm_group_quant/pipeline/tile_gemm_aquant_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/gemm_group_quant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/grouped_convolution/kernel/grouped_convolution_backward_weight_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/grouped_convolution/kernel/grouped_convolution_forward_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/grouped_convolution/utils/convolution_specialization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/grouped_convolution/utils/grouped_convolution_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/grouped_convolution/utils/transform_conv_bwd_weight_to_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/grouped_convolution/utils/transform_conv_fwd_to_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/grouped_convolution.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/image_to_column/kernel/image_to_column_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/image_to_column/pipeline/block_image_to_column_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/image_to_column/pipeline/tile_image_to_column_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/image_to_column.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/layernorm2d/kernel/layernorm2d_fwd_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/layernorm2d/pipeline/layernorm2d_fwd_pipeline_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/layernorm2d/pipeline/layernorm2d_fwd_pipeline_one_pass.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/layernorm2d/pipeline/layernorm2d_fwd_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/layernorm2d/pipeline/layernorm2d_fwd_pipeline_two_pass.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/layernorm2d/pipeline/layernorm2d_fwd_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/layernorm2d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/norm_reduce/block/block_norm_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/norm_reduce/block/block_norm_reduce_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/norm_reduce/thread/thread_welford.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/norm_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/permute/kernel/generic_permute_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/permute/pipeline/generic_petmute_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/reduce/block/block_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/reduce/block/block_reduce2d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/reduce/block/block_reduce2d_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/reduce/block/block_reduce2d_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/rmsnorm2d/kernel/rmsnorm2d_fwd_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/rmsnorm2d/pipeline/rmsnorm2d_fwd_pipeline_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/rmsnorm2d/pipeline/rmsnorm2d_fwd_pipeline_model_sensitive_pass.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/rmsnorm2d/pipeline/rmsnorm2d_fwd_pipeline_one_pass.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/rmsnorm2d/pipeline/rmsnorm2d_fwd_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/rmsnorm2d/pipeline/rmsnorm2d_fwd_pipeline_two_pass.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/rmsnorm2d/pipeline/rmsnorm2d_fwd_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/rmsnorm2d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/smoothquant/kernel/moe_smoothquant_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/smoothquant/kernel/smoothquant_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/smoothquant/pipeline/smoothquant_pipeline_default_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/smoothquant/pipeline/smoothquant_pipeline_one_pass.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/smoothquant/pipeline/smoothquant_pipeline_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/smoothquant/pipeline/smoothquant_pipeline_two_pass.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/smoothquant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/softmax/block/block_softmax_2d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/softmax/block/block_softmax_2d_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/topk/block/block_topk_stream_2d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/topk/block/block_topk_stream_2d_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/topk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/topk_softmax/kernel/topk_softmax_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/topk_softmax/pipeline/topk_softmax_warp_per_row_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/topk_softmax/pipeline/topk_softmax_warp_per_row_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/topk_softmax/pipeline/topk_softmax_warp_per_row_problem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ops/topk_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ref/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/ref/naive_attention.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/include/ck_tile/remod.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_avgpool_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_batched_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_batchnorm_backward.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_batchnorm_forward.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_batchnorm_infer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_cgemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_column_to_image.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_contraction.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_conv_bwd_data.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_conv_bwd_weight.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_conv_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_conv_fwd_bias_activation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_conv_fwd_bias_activation_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_elementwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_fpAintB_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_gemm_layernorm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_gemm_multiple_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_groupnorm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_groupnorm_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_image_to_column.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_layernorm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_layernorm_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_maxpool_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_moe_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_moe_gemm1_blockscale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_moe_gemm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_moe_gemm2_blockscale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_moe_mx_gemm1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_moe_mx_gemm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_mx_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_pool_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/cpu/reference_sparse_embedding3_forward_layernorm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/gpu/naive_conv_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/reference_tensor_operation/gpu/reference_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/add_device_operation_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/add_grouped_conv_bwd_wei_exp_device_operation_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/device_operation_instance_factory.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/avg_pool2d_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/avg_pool3d_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batched_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batched_gemm_add_relu_gemm_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batched_gemm_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batched_gemm_bias_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batched_gemm_bias_softmax_gemm_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batched_gemm_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batched_gemm_multi_d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batched_gemm_softmax_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batched_gemm_softmax_gemm_permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batchnorm_backward.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batchnorm_forward.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/batchnorm_infer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/contraction/device_contraction_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/contraction_bilinear.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/contraction_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/conv_tensor_rearrange/device_column_to_image_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/conv_tensor_rearrange/device_image_to_column_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/conv_tensor_rearrange.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/convolution_backward_data.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/convolution_forward.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/device_elementwise_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/device_gemm_mean_squaremean_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_kn_mn_v1_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_kn_mn_v1_interwave_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_kn_mn_v2_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/elementwise_normalization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_ab_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_add_add_fastgelu.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_add_fastgelu.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_add_multiply.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_add_relu.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_add_relu_add_layernorm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_add_silu.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_b_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_bilinear.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_blockscale_wp.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_dl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_dpp.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_fastgelu.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_multi_abd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_multiply_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_multiply_multiply.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_multiply_multiply_wp.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_mx.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_splitk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_streamk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_universal.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_universal_batched.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_universal_preshuffle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_universal_preshuffle.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_universal_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_universal_streamk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_universal_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_universal_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/gemm_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_data/device_grouped_conv_bwd_data_transpose_xdl_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_data/device_grouped_conv_bwd_data_wmma_f16_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_data/device_grouped_conv_bwd_data_wmma_i8_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_data/device_grouped_conv_bwd_data_xdl_bilinear_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_data/device_grouped_conv_bwd_data_xdl_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_data/device_grouped_conv_bwd_data_xdl_scale_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_weight/device_exp_gemm_xdl_universal_km_kn_mn_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_weight/device_grouped_conv_bwd_weight_dl_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_weight/device_grouped_conv_bwd_weight_two_stage_xdl_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_weight/device_grouped_conv_bwd_weight_v3_xdl_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_weight/device_grouped_conv_bwd_weight_wmma_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_weight/device_grouped_conv_bwd_weight_xdl_bilinear_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_weight/device_grouped_conv_bwd_weight_xdl_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_bwd_weight/device_grouped_conv_bwd_weight_xdl_scale_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_dl_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_wmma_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_bilinear_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_binary_outelementop_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_comp_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_dynamic_op_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_large_tensor_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_mem_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_merged_groups_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_outelementop_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_scale_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_scaleadd_ab_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_conv_fwd/device_grouped_conv_fwd_xdl_scaleadd_scaleadd_relu_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_data.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_data_bilinear.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_data_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_data_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_data_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_weight.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_weight_bilinear.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_weight_dl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_weight_explicit_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_weight_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_weight_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_backward_weight_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_bias_clamp.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_bias_clamp_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_bilinear.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_clamp.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_clamp_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_comp_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_convinvscale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_convscale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_convscale_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_convscale_relu.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_dl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_dynamic_op.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_mem_inter_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_mem_intra_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_scaleadd_ab.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_scaleadd_scaleadd_relu.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_wmma.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_xdl.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_xdl_large_tensor.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_convolution_forward_xdl_merged_groups.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_gemm_bias.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_gemm_fastgelu.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_gemm_fixed_nk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_gemm_multi_abd_fixed_nk.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_gemm_tile_loop.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/grouped_gemm_tile_loop_multiply.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/groupnorm_bwd_data.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/groupnorm_bwd_gamma_beta.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/layernorm_bwd_data.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/layernorm_bwd_gamma_beta.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/max_pool_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/normalization_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/normalization_fwd_swish.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/permute_scale/device_permute_scale_instances.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/permute_scale.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/pool2d_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/pool3d_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/quantization/gemm_quantization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/quantization/grouped_convolution_bias_forward_perchannel_quantization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/quantization/grouped_convolution_bias_forward_perlayer_quantization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/quantization/grouped_convolution_forward_perchannel_quantization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/quantization/grouped_convolution_forward_perlayer_quantization.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f16_f16_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f16_f16_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f16_f16_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f32_f16_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f32_f16_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f32_f16_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f64_f32_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f64_f32_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f64_f32_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i32_i8_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i32_i8_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i8_i8_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i8_i8_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i8_i8_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_impl_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_b16_f32_f32_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_b16_f32_f32_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f16_f32_f32_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f16_f32_f32_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f32_f32_f32_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f32_f32_f32_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f32_f64_f32_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f32_f64_f32_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f64_f64_f64_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f64_f64_f64_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f16_f16_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f16_f16_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f16_f16_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f32_f16_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f32_f16_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f32_f16_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f64_f32_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f64_f32_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f64_f32_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_norm2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i32_i8_add.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i32_i8_avg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i8_i8_amax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i8_i8_max.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i8_i8_min.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/reduce/reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank3_reduce1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank3_reduce2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank3_reduce3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank4_reduce1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank4_reduce2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank4_reduce3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank4_reduce4.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_type.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank3_reduce1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank3_reduce2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank3_reduce3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank4_reduce1.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank4_reduce2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank4_reduce3.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank4_reduce4.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_type.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax/device_softmax_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/transpose/device_transpose_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/include/ck/library/tensor_operation_instance/gpu/transpose_3d.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool2d_bwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool2d_bwd/device_avg_pool2d_bwd_nhwc_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool2d_bwd/device_avg_pool2d_bwd_nhwc_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool2d_bwd/device_avg_pool2d_bwd_nhwc_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool2d_bwd/device_avg_pool2d_bwd_nhwc_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool2d_bwd/device_avg_pool2d_bwd_nhwc_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool2d_bwd/device_avg_pool2d_bwd_nhwc_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool3d_bwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool3d_bwd/avg_pool3d_bwd_ndhwc_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool3d_bwd/device_avg_pool3d_bwd_ndhwc_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool3d_bwd/device_avg_pool3d_bwd_ndhwc_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/avg_pool3d_bwd/device_avg_pool3d_bwd_ndhwc_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_wmma_universal_bf16_bf16_bf16_gkm_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_wmma_universal_bf16_bf16_bf16_gkm_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_wmma_universal_bf16_bf16_bf16_gmk_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_wmma_universal_bf16_bf16_bf16_gmk_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_wmma_universal_f16_f16_f16_gkm_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_wmma_universal_f16_f16_f16_gkm_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_wmma_universal_f16_f16_f16_gmk_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_wmma_universal_f16_f16_f16_gmk_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_bf16_bf16_bf16_gkm_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_bf16_bf16_bf16_gkm_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_bf16_bf16_bf16_gmk_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_bf16_bf16_bf16_gmk_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_f16_f16_f16_gkm_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_f16_f16_f16_gkm_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_f16_f16_f16_gmk_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_f16_f16_f16_gmk_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_f32_f32_f32_gkm_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_f32_f32_f32_gkm_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_f32_f32_f32_gmk_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_f32_f32_f32_gmk_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_int8_int8_int8_gkm_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_int8_int8_int8_gkm_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_int8_int8_int8_gmk_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm/device_batched_gemm_xdl_int8_int8_int8_gmk_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_add_relu_gemm_add/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_add_relu_gemm_add/device_batched_gemm_add_relu_gemm_add_xdl_cshuffle_f16_f16_f16_f16_gmk_gnk_gno_gmo_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_add_relu_gemm_add/device_batched_gemm_add_relu_gemm_add_xdl_cshuffle_f16_f16_f16_f16_gmk_gnk_gon_gmo_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_b_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_b_scale/device_batched_gemm_b_scale_xdl_f16_i4_f16/device_batched_gemm_b_scale_xdl_f16_i4_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_b_scale/device_batched_gemm_b_scale_xdl_f16_i4_f16/device_batched_gemm_b_scale_xdl_f16_i4_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_bias_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_bias_permute/device_batched_gemm_bias_permute_m2_n3_k1_xdl_c_shuffle_f16_f16_f16_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_gemm/device_batched_gemm_gemm_xdl_cshuffle_f16_f16_f16_f16_gmk_gnk_gno_gmo_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_gemm/device_batched_gemm_gemm_xdl_cshuffle_f16_f16_f16_f16_gmk_gnk_gon_gmo_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_f16_f16_f16_gkm_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_f16_f16_f16_gkm_gkn_gmn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_f16_f16_f16_gkm_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_f16_f16_f16_gkm_gnk_gmn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_f16_f16_f16_gmk_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_f16_f16_f16_gmk_gkn_gmn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_f16_f16_f16_gmk_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_f16_f16_f16_gmk_gnk_gmn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_i8_i8_i8_gkm_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_i8_i8_i8_gkm_gkn_gmn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_i8_i8_i8_gkm_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_i8_i8_i8_gkm_gnk_gmn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_i8_i8_i8_gmk_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_i8_i8_i8_gmk_gkn_gmn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_i8_i8_i8_gmk_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_multi_d/device_batched_gemm_multi_d_dl_i8_i8_i8_gmk_gnk_gmn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_reduce/device_batched_gemm_reduce_xdl_cshuffle_f16_f16_f16_f32_f32_gkm_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_reduce/device_batched_gemm_reduce_xdl_cshuffle_f16_f16_f16_f32_f32_gkm_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_reduce/device_batched_gemm_reduce_xdl_cshuffle_f16_f16_f16_f32_f32_gmk_gkn_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_reduce/device_batched_gemm_reduce_xdl_cshuffle_f16_f16_f16_f32_f32_gmk_gnk_gmn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_softmax_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_softmax_gemm/device_batched_gemm_softmax_gemm_xdl_cshuffle_f16_f16_f16_f16_gmk_gnk_gno_gmo_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_softmax_gemm_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_softmax_gemm_permute/device_batched_gemm_bias_softmax_gemm_permute_xdl_cshuffle_bf16_bf16_bf16_bf16_gmk_gnk_gno_gmo_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_softmax_gemm_permute/device_batched_gemm_bias_softmax_gemm_permute_xdl_cshuffle_f16_f16_f16_f16_gmk_gnk_gno_gmo_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_softmax_gemm_permute/device_batched_gemm_softmax_gemm_permute_xdl_cshuffle_bf16_bf16_bf16_bf16_gmk_gnk_gno_gmo_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batched_gemm_softmax_gemm_permute/device_batched_gemm_softmax_gemm_permute_xdl_cshuffle_f16_f16_f16_f16_gmk_gnk_gno_gmo_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_backward_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_backward_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_backward_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_backward_f64_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_forward_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_forward_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_forward_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_forward_f64_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_infer_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_infer_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_infer_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/batchnorm/device_batchnorm_infer_f64_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/column_to_image/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/column_to_image/device_column_to_image_gndhwc_3d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/column_to_image/device_column_to_image_gnhwc_2d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/column_to_image/device_column_to_image_gnwc_1d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/column_to_image/device_column_to_image_ndhwgc_3d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/column_to_image/device_column_to_image_nhwgc_2d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/column_to_image/device_column_to_image_nwgc_1d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_bf16_bf16_bf16_bf16_compute_f32_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_bf16_bf16_bf16_bf16_compute_f32_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_bf16_bf16_bf16_bf16_compute_f32_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_bf16_bf16_bf16_bf16_compute_f32_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f16_f16_f16_f16_compute_f32_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f16_f16_f16_f16_compute_f32_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f16_f16_f16_f16_compute_f32_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f16_f16_f16_f16_compute_f32_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_compute_bf16_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_compute_bf16_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_compute_bf16_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_compute_bf16_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_compute_f16_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_compute_f16_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_compute_f16_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_compute_f16_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_f32_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_f64_compute_f32_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_f64_compute_f32_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_f64_compute_f32_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_f64_compute_f32_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_f64_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_f64_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_f64_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/2D/device_contraction_bilinear_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_f64_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_bf16_bf16_bf16_bf16_compute_f32_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_bf16_bf16_bf16_bf16_compute_f32_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_bf16_bf16_bf16_bf16_compute_f32_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_bf16_bf16_bf16_bf16_compute_f32_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f16_f16_f16_f16_compute_f32_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f16_f16_f16_f16_compute_f32_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f16_f16_f16_f16_compute_f32_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f16_f16_f16_f16_compute_f32_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_compute_bf16_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_compute_bf16_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_compute_bf16_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_compute_bf16_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_compute_f16_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_compute_f16_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_compute_f16_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_compute_f16_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_f32_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_f64_compute_f32_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_f64_compute_f32_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_f64_compute_f32_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_f64_compute_f32_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_f64_kknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_f64_knnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_f64_mknn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/6D/device_contraction_bilinear_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_f64_mnnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_bilinear/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_bf16_bf16_bf16_compute_f32_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_bf16_bf16_bf16_compute_f32_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_bf16_bf16_bf16_compute_f32_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_bf16_bf16_bf16_compute_f32_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f16_f16_f16_compute_f32_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f16_f16_f16_compute_f32_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f16_f16_f16_compute_f32_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f16_f16_f16_compute_f32_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_compute_bf16_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_compute_bf16_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_compute_bf16_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_compute_bf16_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_compute_f16_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_compute_f16_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_compute_f16_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_compute_f16_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f32_f32_f32_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_compute_f32_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_compute_f32_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_compute_f32_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_compute_f32_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/2D/device_contraction_scale_m2_n2_k2_xdl_c_shuffle_f64_f64_f64_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_bf16_bf16_bf16_compute_f32_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_bf16_bf16_bf16_compute_f32_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_bf16_bf16_bf16_compute_f32_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_bf16_bf16_bf16_compute_f32_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f16_f16_f16_compute_f32_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f16_f16_f16_compute_f32_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f16_f16_f16_compute_f32_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f16_f16_f16_compute_f32_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_compute_bf16_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_compute_bf16_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_compute_bf16_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_compute_bf16_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_compute_f16_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_compute_f16_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_compute_f16_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_compute_f16_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f32_f32_f32_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_compute_f32_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_compute_f32_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_compute_f32_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_compute_f32_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_kkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_knn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_mkn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/6D/device_contraction_scale_m6_n6_k6_xdl_c_shuffle_f64_f64_f64_mnn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/contraction_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv1d_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv1d_bwd_data/device_conv1d_bwd_data_xdl_nwc_kxc_nwk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv1d_bwd_data/device_conv1d_bwd_data_xdl_nwc_kxc_nwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv1d_bwd_data/device_conv1d_bwd_data_xdl_nwc_kxc_nwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv1d_bwd_data/device_conv1d_bwd_data_xdl_nwc_kxc_nwk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_bwd_data/device_conv2d_bwd_data_dl_nhwc_kyxc_nhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_bwd_data/device_conv2d_bwd_data_dl_nhwc_kyxc_nhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_bwd_data/device_conv2d_bwd_data_dl_nhwc_kyxc_nhwk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_bwd_data/device_conv2d_bwd_data_xdl_nhwc_kyxc_nhwk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_bwd_data/device_conv2d_bwd_data_xdl_nhwc_kyxc_nhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_bwd_data/device_conv2d_bwd_data_xdl_nhwc_kyxc_nhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_bwd_data/device_conv2d_bwd_data_xdl_nhwc_kyxc_nhwk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd/device_conv2d_fwd_xdl_c_shuffle_nhwc_kyxc_nhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd/device_conv2d_fwd_xdl_nhwc_kyxc_nhwk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd/device_conv2d_fwd_xdl_nhwc_kyxc_nhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd/device_conv2d_fwd_xdl_nhwc_kyxc_nhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd/device_conv2d_fwd_xdl_nhwc_kyxc_nhwk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd_bias_relu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd_bias_relu/device_conv2d_fwd_xdl_c_shuffle_bias_relu_nhwc_kyxc_nhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd_bias_relu_add/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv2d_fwd_bias_relu_add/device_conv2d_fwd_xdl_c_shuffle_bias_relu_add_nhwc_kyxc_nhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv3d_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv3d_bwd_data/device_conv3d_bwd_data_xdl_ndhwc_kzyxc_ndhwk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv3d_bwd_data/device_conv3d_bwd_data_xdl_ndhwc_kzyxc_ndhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv3d_bwd_data/device_conv3d_bwd_data_xdl_ndhwc_kzyxc_ndhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/conv3d_bwd_data/device_conv3d_bwd_data_xdl_ndhwc_kzyxc_ndhwk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/elementwise/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/elementwise/device_normalize_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/elementwise_normalization/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/elementwise_normalization/device_elementwise_normalization_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f16_f16_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f16_f16_f16_km_kn_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f16_f16_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f16_f16_f16_km_nk_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f16_f16_f16_mk_kn_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f16_f16_f16_mk_nk_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f32_f32_f32_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f32_f32_f32_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f32_f32_f32_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_f32_f32_f32_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_i8_i8_i8_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_i8_i8_i8_km_kn_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_i8_i8_i8_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_i8_i8_i8_km_nk_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_i8_i8_i8_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_i8_i8_i8_mk_kn_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_i8_i8_i8_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dl_i8_i8_i8_mk_nk_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dpp_f16_f16_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dpp_f16_f16_f16_km_kn_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dpp_f16_f16_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dpp_f16_f16_f16_km_nk_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dpp_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dpp_f16_f16_f16_mk_kn_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dpp_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_dpp_f16_f16_f16_mk_nk_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_bf16_bf16_bf16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_bf16_bf16_bf16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_bf16_bf16_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_bf16_bf16_bf16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_f16_f16_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_f16_f16_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_int8_int8_int8_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_int8_int8_int8_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_int8_int8_int8_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_wmma_int8_int8_int8_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_2_stage_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_bf16_bf16_bf16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_bf16_bf16_bf16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_bf16_bf16_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_bf16_bf16_bf16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f16_f16_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f16_f16_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f16_f8_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f16_f8_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f32_f32_f32_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f32_f32_f32_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f32_f32_f32_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_f32_f32_f32_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_kn_mn_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_kn_mn_v1_interwave_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_kn_mn_v1_interwave_padded_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_kn_mn_v1_padded_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_kn_mn_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_kn_mn_v2_padded_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_fp8_fp8_fp8_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_i8_i8_i8_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_i8_i8_i8_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_i8_i8_i8_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_i8_i8_i8_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_lds_direct_load_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_lds_direct_load_f32_f32_f32_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_lds_direct_load_f32_f32_f32_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_lds_direct_load_f32_f32_f32_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_c_shuffle_lds_direct_load_f32_f32_f32_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_kn_mn_add_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_kn_mn_default_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_kn_mn_default_pipeline_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_kn_mn_default_pipeline_v2_opt_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_kn_mn_interwave_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_kn_mn_irregular_default_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_kn_mn_irregular_default_pipeline_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_kn_mn_irregular_interwave_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_nk_mn_add_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_nk_mn_default_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_nk_mn_default_pipeline_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_nk_mn_default_pipeline_v2_opt_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_nk_mn_interwave_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_nk_mn_irregular_default_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_nk_mn_irregular_default_pipeline_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/km_nk_mn_irregular_interwave_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_kn_mn_add_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_kn_mn_default_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_kn_mn_default_pipeline_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_kn_mn_default_pipeline_v2_opt_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_kn_mn_interwave_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_kn_mn_irregular_default_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_kn_mn_irregular_default_pipeline_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_kn_mn_irregular_interwave_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_nk_mn_add_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_nk_mn_default_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_nk_mn_default_pipeline_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_nk_mn_default_pipeline_v2_opt_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_nk_mn_interwave_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_nk_mn_irregular_default_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_nk_mn_irregular_default_pipeline_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f16_f16_f16/mk_nk_mn_irregular_interwave_pipeline_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f32_f32_f32_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f32_f32_f32_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f32_f32_f32_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f32_f32_f32_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f64_f64_f64_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f64_f64_f64_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f64_f64_f64_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm/device_gemm_xdl_f64_f64_f64_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_ab_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_ab_scale/device_gemm_ab_scale_xdl_f8_f8_bf16/device_gemm_ab_scale_xdl_f8_f8_bf16_mk_nk_mn_128_128_128.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_ab_scale/device_gemm_ab_scale_xdl_f8_f8_bf16/device_gemm_ab_scale_xdl_f8_f8_bf16_mk_nk_mn_128_128_128_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_ab_scale/device_gemm_ab_scale_xdl_f8_f8_bf16/device_gemm_ab_scale_xdl_f8_f8_bf16_mk_nk_mn_128_128_128_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_ab_scale/device_gemm_ab_scale_xdl_f8_f8_bf16/device_gemm_ab_scale_xdl_f8_f8_bf16_mk_nk_mn_128_128_128_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_ab_scale/device_gemm_ab_scale_xdl_f8_f8_bf16/device_gemm_ab_scale_xdl_f8_f8_bf16_mk_nk_mn_128_128_128_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add/device_gemm_add_xdl_c_shuffle_bf16_i8_bf16_bf16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add/device_gemm_add_xdl_c_shuffle_f16_i8_f16_f16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_add_fastgelu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_add_fastgelu/device_gemm_add_add_fastgelu_xdl_c_shuffle_f16_f16_f16_f16_f16_km_kn_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_add_fastgelu/device_gemm_add_add_fastgelu_xdl_c_shuffle_f16_f16_f16_f16_f16_km_nk_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_add_fastgelu/device_gemm_add_add_fastgelu_xdl_c_shuffle_f16_f16_f16_f16_f16_mk_kn_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_add_fastgelu/device_gemm_add_add_fastgelu_xdl_c_shuffle_f16_f16_f16_f16_f16_mk_nk_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_fastgelu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_fastgelu/device_gemm_add_fastgelu_xdl_c_shuffle_bf16_i8_bf16_bf16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_fastgelu/device_gemm_add_fastgelu_xdl_c_shuffle_f16_f16_f16_f16_km_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_fastgelu/device_gemm_add_fastgelu_xdl_c_shuffle_f16_f16_f16_f16_km_nk_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_fastgelu/device_gemm_add_fastgelu_xdl_c_shuffle_f16_f16_f16_f16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_fastgelu/device_gemm_add_fastgelu_xdl_c_shuffle_f16_f16_f16_f16_mk_nk_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_fastgelu/device_gemm_add_fastgelu_xdl_c_shuffle_f16_i8_f16_f16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_multiply/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_multiply/device_gemm_add_multiply_xdl_c_shuffle_f16_f16_f16_f16_f16_km_kn_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_multiply/device_gemm_add_multiply_xdl_c_shuffle_f16_f16_f16_f16_f16_km_nk_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_multiply/device_gemm_add_multiply_xdl_c_shuffle_f16_f16_f16_f16_f16_mk_kn_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_multiply/device_gemm_add_multiply_xdl_c_shuffle_f16_f16_f16_f16_f16_mk_nk_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_relu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_relu/device_gemm_add_relu_xdl_c_shuffle_bf16_i8_bf16_bf16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_relu/device_gemm_add_relu_xdl_c_shuffle_f16_i8_f16_f16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_relu_add_layernorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_relu_add_layernorm/device_gemm_add_relu_add_xdl_c_shuffle_layernorm_f16_km_kn_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_relu_add_layernorm/device_gemm_add_relu_add_xdl_c_shuffle_layernorm_f16_km_nk_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_relu_add_layernorm/device_gemm_add_relu_add_xdl_c_shuffle_layernorm_f16_mk_kn_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_relu_add_layernorm/device_gemm_add_relu_add_xdl_c_shuffle_layernorm_f16_mk_nk_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_silu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_silu/device_gemm_add_silu_xdl_c_shuffle_bf16_i8_bf16_bf16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_add_silu/device_gemm_add_silu_xdl_c_shuffle_f16_i8_f16_f16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_b_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_b_scale/device_gemm_b_scale_wmma_f16_i4_f16/device_gemm_b_scale_wmma_f16_i4_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_b_scale/device_gemm_b_scale_wmma_f16_i4_f16/device_gemm_b_scale_wmma_f16_i4_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_b_scale/device_gemm_b_scale_xdl_f16_i4_f16/device_gemm_b_scale_xdl_f16_i4_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_b_scale/device_gemm_b_scale_xdl_f16_i4_f16/device_gemm_b_scale_xdl_f16_i4_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bias_add_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bias_add_reduce/device_gemm_bias_add_mean_squaremean_xdl_cshuffle_f16_f16_f16_f32_f32_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bias_add_reduce/device_gemm_bias_add_mean_squaremean_xdl_cshuffle_f16_f16_f16_f32_f32_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bias_add_reduce/device_gemm_bias_add_mean_squaremean_xdl_cshuffle_f16_f16_f16_f32_f32_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bias_add_reduce/device_gemm_bias_add_mean_squaremean_xdl_cshuffle_f16_f16_f16_f32_f32_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bilinear/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bilinear/device_gemm_bilinear_wmma_c_shuffle_i8_i8_i8_i8_km_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bilinear/device_gemm_bilinear_wmma_c_shuffle_i8_i8_i8_i8_km_nk_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bilinear/device_gemm_bilinear_wmma_c_shuffle_i8_i8_i8_i8_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bilinear/device_gemm_bilinear_wmma_c_shuffle_i8_i8_i8_i8_mk_nk_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bilinear/device_gemm_bilinear_xdl_c_shuffle_f16_f16_f16_f16_km_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bilinear/device_gemm_bilinear_xdl_c_shuffle_f16_f16_f16_f16_km_nk_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bilinear/device_gemm_bilinear_xdl_c_shuffle_f16_f16_f16_f16_mk_kn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_bilinear/device_gemm_bilinear_xdl_c_shuffle_f16_f16_f16_f16_mk_nk_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_blockscale_wp/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_blockscale_wp/device_gemm_blockscale_wp_xdl_f8_f8_bf16/device_gemm_blockscale_wp_xdl_f8_f8_bf16_mk_nk_mn_128_128_128.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_blockscale_wp/device_gemm_blockscale_wp_xdl_f8_f8_bf16/device_gemm_blockscale_wp_xdl_f8_f8_bf16_mk_nk_mn_128_128_128_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_blockscale_wp/device_gemm_blockscale_wp_xdl_f8_f8_bf16/device_gemm_blockscale_wp_xdl_f8_f8_bf16_mk_nk_mn_128_128_128_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_blockscale_wp/device_gemm_blockscale_wp_xdl_f8_f8_bf16/device_gemm_blockscale_wp_xdl_f8_f8_bf16_mk_nk_mn_128_128_128_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_blockscale_wp/device_gemm_blockscale_wp_xdl_f8_f8_bf16/device_gemm_blockscale_wp_xdl_f8_f8_bf16_mk_nk_mn_128_128_128_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_fastgelu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_fastgelu/device_gemm_fastgelu_xdl_c_shuffle_f16_f16_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_fastgelu/device_gemm_fastgelu_xdl_c_shuffle_f16_f16_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_fastgelu/device_gemm_fastgelu_xdl_c_shuffle_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_fastgelu/device_gemm_fastgelu_xdl_c_shuffle_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_bf16_i8_bf16_mk_kn_mn_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_bf16_i8_bf16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_bf16_i8_bf16_mk_nk_mn_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_bias_bf16_i8_bf16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_bias_gelu_bf16_i8_bf16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_bias_gelu_bf16_i8_bf16_mk_nk_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_gelu_bf16_i8_bf16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_multiply_bf16_i8_bf16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_multiply_bias_bf16_i8_bf16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_multiply_bias_gelu_bf16_i8_bf16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multi_abd/device_gemm_xdl_multi_abd_multiply_gelu_bf16_i8_bf16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_add/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_add/device_gemm_multiply_add_xdl_c_shuffle_f16_f16_f16_f16_f16_mk_kn_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_add/device_gemm_multiply_add_xdl_c_shuffle_f16_f16_f16_f16_f16_mk_nk_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_add/device_gemm_multiply_add_xdl_c_shuffle_f16_f8_f32_f32_f16_mk_kn_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_add/device_gemm_multiply_add_xdl_c_shuffle_f16_f8_f32_f32_f16_mk_nk_mn_mn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_default_instance_part1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_default_instance_part2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_kpadding_instance_part1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_kpadding_instance_part2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_mfma16x16_default_instance_part1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_mfma16x16_default_instance_part2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_mfma16x16_default_instance_part3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_mfma16x16_kpadding_instance_part1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_mfma16x16_kpadding_instance_part2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_comp_mfma16x16_kpadding_instance_part3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_bf16/device_gemm_multiply_multiply_xdl_f8_f8_bf16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_default_instance_part1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_default_instance_part2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_kpadding_instance_part1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_kpadding_instance_part2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_mfma16x16_default_instance_part1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_mfma16x16_default_instance_part2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_mfma16x16_default_instance_part3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_mfma16x16_kpadding_instance_part1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_mfma16x16_kpadding_instance_part2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_comp_mfma16x16_kpadding_instance_part3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_f8_f8_f16/device_gemm_multiply_multiply_xdl_f8_f8_f16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_bf16/device_gemm_multiply_multiply_xdl_i8_i8_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_bf16/device_gemm_multiply_multiply_xdl_i8_i8_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_bf16/device_gemm_multiply_multiply_xdl_i8_i8_bf16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_bf16/device_gemm_multiply_multiply_xdl_i8_i8_bf16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_bf16/device_gemm_multiply_multiply_xdl_i8_i8_bf16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_bf16/device_gemm_multiply_multiply_xdl_i8_i8_bf16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_bf16/device_gemm_multiply_multiply_xdl_i8_i8_bf16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_f16/device_gemm_multiply_multiply_xdl_i8_i8_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_f16/device_gemm_multiply_multiply_xdl_i8_i8_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_f16/device_gemm_multiply_multiply_xdl_i8_i8_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_f16/device_gemm_multiply_multiply_xdl_i8_i8_f16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_f16/device_gemm_multiply_multiply_xdl_i8_i8_f16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_f16/device_gemm_multiply_multiply_xdl_i8_i8_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply/device_gemm_multiply_multiply_xdl_i8_i8_f16/device_gemm_multiply_multiply_xdl_i8_i8_f16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma16x16_mn_compute_default_instance_p1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma16x16_mn_compute_default_instance_p2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma16x16_mn_compute_default_instance_p3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma16x16_mn_compute_default_instance_p4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma16x16_mn_compute_default_instance_p5.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma16x16_mn_compute_default_instance_p6.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_compute_default_instance_p1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_compute_default_instance_p2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p1_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p2_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p3_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p3_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p4_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p4_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p5_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_bf16/device_gemm_multiply_multiply_wp_xdl_f8_f8_bf16_mk_mfma_mn_p5_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p5.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p6.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_compute_default_instance_p1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_compute_default_instance_p2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p1_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p2_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p3_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p3_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p4_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p4_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p5_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_multiply_multiply_wp/f8_f8_f16/device_gemm_multiply_multiply_wp_xdl_f8_f8_f16_mk_mfma_mn_p5_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_bf6_bf6_bf16/device_gemm_mx_xdl_bf6_bf6_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_bf6_bf6_bf16/device_gemm_mx_xdl_bf6_bf6_bf16_mk_nk_mn_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_bf8_f8_f16/device_gemm_mx_xdl_bf8_f8_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_bf8_f8_f16/device_gemm_mx_xdl_bf8_f8_f16_mk_kn_mn_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f4_f4_f16/device_gemm_mx_xdl_f4_f4_f16_mk_mfma_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f4_f4_f16/device_gemm_mx_xdl_f4_f4_f16_mk_mfma_mn_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f4_f4_f16/device_gemm_mx_xdl_f4_f4_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f4_f4_f16/device_gemm_mx_xdl_f4_f4_f16_mk_nk_mn_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f6_f6_f16/device_gemm_mx_xdl_f6_f6_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f6_f6_f16/device_gemm_mx_xdl_f6_f6_f16_mk_nk_mn_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f8_f8_bf16/device_gemm_mx_xdl_f8_f8_bf16_km_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f8_f8_bf16/device_gemm_mx_xdl_f8_f8_bf16_km_nk_mn_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f8_f8_bf16/device_gemm_mx_xdl_f8_f8_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f8_f8_bf16/device_gemm_mx_xdl_f8_f8_bf16_mk_nk_mn_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f8_f8_f16/device_gemm_mx_xdl_f8_f8_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_mx/device_gemm_mx_xdl_f8_f8_f16/device_gemm_mx_xdl_f8_f8_f16_mk_nk_mn_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_reduce/device_gemm_reduce_xdl_cshuffle_f16_f16_f16_f32_f32_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_reduce/device_gemm_reduce_xdl_cshuffle_f16_f16_f16_f32_f32_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_reduce/device_gemm_reduce_xdl_cshuffle_f16_f16_f16_f32_f32_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_reduce/device_gemm_reduce_xdl_cshuffle_f16_f16_f16_f32_f32_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_comp_fp8_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_comp_fp8_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_comp_fp8_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_comp_fp8_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_v1_interwave_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_v1_interwave_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_v1_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_v2_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_nk_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_nk_mn_v1_interwave_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_nk_mn_v1_interwave_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_nk_mn_v1_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_nk_mn_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_f16_f16_mk_nk_mn_v2_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_mk_kn_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_mk_kn_mn_v1_interwave_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_mk_kn_mn_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_mk_nk_mn_kpb128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_mk_nk_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_mk_nk_mn_v1_interwave_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f16_fp8_f16_mk_nk_mn_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f32_f32_f32_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f32_f32_f32_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f32_f32_f32_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_f32_f32_f32_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_fp8_f16_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_fp8_f16_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_fp8_f16_f16_mk_kn_mn_v1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_fp8_f16_f16_mk_kn_mn_v1_interwave_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_fp8_f16_f16_mk_kn_mn_v2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_fp8_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_splitk/device_gemm_xdl_splitk_lds_direct_load_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_streamk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_streamk/device_gemm_xdl_streamk_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_km_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_bf16_bf16/device_gemm_wmma_universal_bf16_bf16_bf16_mk_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_i4_bf16/device_gemm_wmma_universal_bf16_i4_bf16_km_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_i4_bf16/device_gemm_wmma_universal_bf16_i4_bf16_km_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_i4_bf16/device_gemm_wmma_universal_bf16_i4_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_bf16_i4_bf16/device_gemm_wmma_universal_bf16_i4_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_km_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f16_f16/device_gemm_wmma_universal_f16_f16_f16_mk_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_km_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_f8_f16/device_gemm_wmma_universal_f16_f8_f16_mk_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_i4_f16/device_gemm_wmma_universal_f16_i4_f16_km_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_i4_f16/device_gemm_wmma_universal_f16_i4_f16_km_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_i4_f16/device_gemm_wmma_universal_f16_i4_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f16_i4_f16/device_gemm_wmma_universal_f16_i4_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_km_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f16_f16/device_gemm_wmma_universal_f8_f16_f16_mk_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_wmma_universal_f8_f8_bf16/device_gemm_wmma_universal_f8_f8_bf16_mk_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_comp_mkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_comp_mpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_mem_v1_mkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_km_nk_mn_mem_v2_mkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_i4_bf16/device_gemm_xdl_universal_bf16_i4_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_bf16_i4_bf16/device_gemm_xdl_universal_bf16_i4_bf16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_f8_f16/device_gemm_xdl_universal_f16_f8_f16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_i4_f16/device_gemm_xdl_universal_f16_i4_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f16_i4_f16/device_gemm_xdl_universal_f16_i4_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f16_f16/device_gemm_xdl_universal_f8_f16_f16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn_comp_nkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn_mem_v1_nkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_kn_mn_mem_v2_nkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal/device_gemm_xdl_universal_f8_f8_bf16/device_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_batched/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_batched/device_batched_gemm_xdl_universal_bf16_bf16_bf16/device_batched_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_batched/device_batched_gemm_xdl_universal_bf16_bf16_bf16/device_batched_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_batched/device_batched_gemm_xdl_universal_bf16_bf16_bf16/device_batched_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_batched/device_batched_gemm_xdl_universal_bf16_bf16_bf16/device_batched_gemm_xdl_universal_bf16_bf16_bf16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_batched/device_batched_gemm_xdl_universal_f8_f8_bf16/device_batched_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_batched/device_batched_gemm_xdl_universal_f8_f8_bf16/device_batched_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_batched/device_batched_gemm_xdl_universal_f8_f8_bf16/device_batched_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_batched/device_batched_gemm_xdl_universal_f8_f8_bf16/device_batched_gemm_xdl_universal_f8_f8_bf16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma16x16_nk_mn_comp_default_instance_p1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma16x16_nk_mn_comp_default_instance_p2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma16x16_nk_mn_comp_default_instance_p3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma16x16_nk_mn_comp_default_instance_p4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma16x16_nk_mn_comp_default_instance_p5.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma16x16_nk_mn_comp_default_instance_p6.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma_mn_p1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma_mn_p2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma_mn_p3_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma_mn_p4_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma_mn_p5_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma_nk_mn_comp_default_instance_p1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_mfma_nk_mn_comp_default_instance_p2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_f8_bf16_mk_mfma32x32_mn_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_bf16/device_gemm_xdl_universal_preshuffle_f8_f8_f8_bf16_mk_mfma32x32_mn_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p5.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma16x16_mn_compute_default_instance_p6.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_compute_default_instance_p1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_compute_default_instance_p2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p1_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p2_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p3_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p3_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p4_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p4_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p5_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_preshuffle/device_gemm_xdl_universal_preshuffle_f8_f8_f16/device_gemm_universal_preshuffle_xdl_f8_f8_f16_mk_mfma_mn_p5_default_instance_v2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_bf16_bf16/device_gemm_xdl_universal_bf16_bf16_bf16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_i8_bf16/device_gemm_xdl_universal_bf16_i8_bf16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_i8_bf16/device_gemm_xdl_universal_bf16_i8_bf16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_i8_bf16/device_gemm_xdl_universal_bf16_i8_bf16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_i8_bf16/device_gemm_xdl_universal_bf16_i8_bf16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_i8_bf16/device_gemm_xdl_universal_bf16_i8_bf16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_i8_bf16/device_gemm_xdl_universal_bf16_i8_bf16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_i8_bf16/device_gemm_xdl_universal_bf16_i8_bf16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_bf16_i8_bf16/device_gemm_xdl_universal_bf16_i8_bf16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_reduce/device_gemm_xdl_universal_f16_f16_f16/device_gemm_xdl_universal_f16_f16_f16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_comp_mkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_comp_mpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_mem_v1_mkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_km_nk_mn_mem_v2_mkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_bf16_bf16_bf16/device_gemm_xdl_universal_streamk_bf16_bf16_bf16_mk_nk_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f16_f16/device_gemm_xdl_universal_streamk_f16_f16_f16_mk_nk_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f16_f8_f16/device_gemm_xdl_universal_streamk_f16_f8_f16_mk_nk_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f16_f16/device_gemm_xdl_universal_streamk_f8_f16_f16_mk_nk_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn_comp_nkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn_mem_v1_nkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_kn_mn_mem_v2_nkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_nk_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_nk_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_nk_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_nk_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_nk_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_nk_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/gemm_universal_streamk/device_gemm_xdl_universal_streamk_f8_f8_bf16/device_gemm_xdl_universal_streamk_f8_f8_bf16_mk_nk_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/dl/device_grouped_conv1d_bwd_weight_dl_gnwc_gkxc_gnwk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/dl/device_grouped_conv1d_bwd_weight_dl_gnwc_gkxc_gnwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/dl/device_grouped_conv1d_bwd_weight_dl_gnwc_gkxc_gnwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/dl/device_grouped_conv1d_bwd_weight_dl_nwgc_gkxc_nwgk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/dl/device_grouped_conv1d_bwd_weight_dl_nwgc_gkxc_nwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/dl/device_grouped_conv1d_bwd_weight_dl_nwgc_gkxc_nwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/xdl/device_grouped_conv1d_bwd_weight_xdl_gnwc_gkxc_gnwk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/xdl/device_grouped_conv1d_bwd_weight_xdl_gnwc_gkxc_gnwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_bwd_weight/xdl/device_grouped_conv1d_bwd_weight_xdl_gnwc_gkxc_gnwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_fwd/xdl/device_grouped_conv1d_fwd_xdl_gnwc_gkxc_gnwk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_fwd/xdl/device_grouped_conv1d_fwd_xdl_gnwc_gkxc_gnwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_fwd/xdl/device_grouped_conv1d_fwd_xdl_gnwc_gkxc_gnwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv1d_fwd/xdl/device_grouped_conv1d_fwd_xdl_gnwc_gkxc_gnwk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/wmma/device_grouped_conv2d_bwd_data_wmma_gnhwc_gkyxc_gnhwk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/wmma/device_grouped_conv2d_bwd_data_wmma_gnhwc_gkyxc_gnhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/wmma/device_grouped_conv2d_bwd_data_wmma_gnhwc_gkyxc_gnhwk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/wmma/device_grouped_conv2d_bwd_data_wmma_gnhwc_gkyxc_gnhwk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/wmma/device_grouped_conv2d_bwd_data_wmma_nhwgc_gkyxc_nhwgk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/wmma/device_grouped_conv2d_bwd_data_wmma_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/wmma/device_grouped_conv2d_bwd_data_wmma_nhwgc_gkyxc_nhwgk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/wmma/device_grouped_conv2d_bwd_data_wmma_nhwgc_gkyxc_nhwgk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_gnhwc_gkyxc_gnhwk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_gnhwc_gkyxc_gnhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_gnhwc_gkyxc_gnhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkcyx_ngkhw_bf16_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkcyx_ngkhw_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkcyx_ngkhw_bf16_vec_transpose_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkcyx_ngkhw_f16_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkcyx_ngkhw_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkcyx_ngkhw_f16_vec_transpose_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkcyx_ngkhw_f32_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkcyx_ngkhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkcyx_ngkhw_f32_vec_transpose_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkyxc_ngkhw_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkyxc_ngkhw_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_ngchw_gkyxc_ngkhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_nhwgc_gkyxc_nhwgk_bf16_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_nhwgc_gkyxc_nhwgk_f16_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_nhwgc_gkyxc_nhwgk_f32_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_data/xdl/device_grouped_conv2d_bwd_data_xdl_nhwgc_gkyxc_nhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/dl/device_grouped_conv2d_bwd_weight_dl_gnhwc_gkyxc_gnhwk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/dl/device_grouped_conv2d_bwd_weight_dl_gnhwc_gkyxc_gnhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/dl/device_grouped_conv2d_bwd_weight_dl_gnhwc_gkyxc_gnhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/dl/device_grouped_conv2d_bwd_weight_dl_nhwgc_gkyxc_nhwgk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/dl/device_grouped_conv2d_bwd_weight_dl_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/dl/device_grouped_conv2d_bwd_weight_dl_nhwgc_gkyxc_nhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/gnhwc_gkyxc_gnhwk/device_grouped_conv2d_bwd_weight_xdl_gnhwc_gkyxc_gnhwk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/gnhwc_gkyxc_gnhwk/device_grouped_conv2d_bwd_weight_xdl_gnhwc_gkyxc_gnhwk_f16_default_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/gnhwc_gkyxc_gnhwk/device_grouped_conv2d_bwd_weight_xdl_gnhwc_gkyxc_gnhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/gnhwc_gkyxc_gnhwk/device_grouped_conv2d_bwd_weight_xdl_gnhwc_gkyxc_gnhwk_f16_pad0_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/gnhwc_gkyxc_gnhwk/device_grouped_conv2d_bwd_weight_xdl_gnhwc_gkyxc_gnhwk_f32_default_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/gnhwc_gkyxc_gnhwk/device_grouped_conv2d_bwd_weight_xdl_gnhwc_gkyxc_gnhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/gnhwc_gkyxc_gnhwk/device_grouped_conv2d_bwd_weight_xdl_gnhwc_gkyxc_gnhwk_f32_pad0_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkcyx_ngkhw_bf16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkcyx_ngkhw_bf16_pipev1_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkcyx_ngkhw_bf16_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkcyx_ngkhw_bf16_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkcyx_ngkhw_f16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkcyx_ngkhw_f16_pipev1_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkcyx_ngkhw_f16_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkcyx_ngkhw_f16_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_xdl_ngchw_gkcyx_ngkhw_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_xdl_ngchw_gkcyx_ngkhw_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkcyx_ngkhw/device_grouped_conv2d_bwd_weight_xdl_ngchw_gkcyx_ngkhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkyxc_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkyxc_ngkhw_bf16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkyxc_ngkhw/device_grouped_conv2d_bwd_weight_two_stage_xdl_ngchw_gkyxc_ngkhw_f16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/ngchw_gkyxc_ngkhw/device_grouped_conv2d_bwd_weight_xdl_ngchw_gkyxc_ngkhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_bf16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_bf16_pipev1_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_bf16_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_bf16_pipev2_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_bf16_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_bf16_pipev5_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_f16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_f16_pipev1_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_f16_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_f16_pipev2_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_f16_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_two_stage_xdl_nhwgc_gkyxc_nhwgk_f16_pipev5_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_bf16_default_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_bf16_default_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_bf16_pad0_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_bf16_pad0_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f16_default_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f16_default_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f16_pad0_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f16_pad0_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f32_default_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f32_default_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f32_pad0_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_bwd_weight/xdl/nhwgc_gkyxc_nhwgk/device_grouped_conv2d_bwd_weight_xdl_nhwgc_gkyxc_nhwgk_f32_pad0_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/dl/device_grouped_conv2d_fwd_dl_gnhwc_gkyxc_gnhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/dl/device_grouped_conv2d_fwd_dl_gnhwc_gkyxc_gnhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/dl/device_grouped_conv2d_fwd_dl_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/dl/device_grouped_conv2d_fwd_dl_nhwgc_gkyxc_nhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_gnhwc_gkyxc_gnhwk_f16_1x1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_gnhwc_gkyxc_gnhwk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_gnhwc_gkyxc_gnhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_gnhwc_gkyxc_gnhwk_f16_oddc_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_gnhwc_gkyxc_gnhwk_i8_1x1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_gnhwc_gkyxc_gnhwk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_gnhwc_gkyxc_gnhwk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_gnhwc_gkyxc_gnhwk_i8_oddc_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_nhwgc_gkyxc_nhwgk_f16_1x1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_nhwgc_gkyxc_nhwgk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_nhwgc_gkyxc_nhwgk_f16_oddc_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_nhwgc_gkyxc_nhwgk_i8_1x1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_nhwgc_gkyxc_nhwgk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_nhwgc_gkyxc_nhwgk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/wmma/device_grouped_conv2d_fwd_wmma_nhwgc_gkyxc_nhwgk_i8_oddc_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_bf16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_bf16_comp_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_bf16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f32_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_bf16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_bf16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_bf16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f32_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/comp/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_int8_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_gnhwc_gkyxc_gnhwk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_gnhwc_gkyxc_gnhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_gnhwc_gkyxc_gnhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_bf16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_bf16_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f16_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f32_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkyxc_ngkhw_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkyxc_ngkhw_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkyxc_ngkhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_ngchw_gkyxc_ngkhw_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_bf16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f32_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/large_tensor/device_grouped_conv2d_fwd_xdl_large_tensor_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/large_tensor/device_grouped_conv2d_fwd_xdl_large_tensor_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/large_tensor/device_grouped_conv2d_fwd_xdl_large_tensor_nhwgc_gkyxc_nhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/large_tensor/device_grouped_conv2d_fwd_xdl_large_tensor_nhwgc_gkyxc_nhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_bf16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_bf16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f32_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_ngchw_gkcyx_ngkhw_f32_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_bf16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_bf16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f32_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_f32_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_int8_mem_inter_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/mem/device_grouped_conv2d_fwd_xdl_nhwgc_gkyxc_nhwgk_int8_mem_intra_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/merged_groups/device_grouped_conv2d_fwd_xdl_merged_groups_ngchw_gkcyx_ngkhw_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/merged_groups/device_grouped_conv2d_fwd_xdl_merged_groups_ngchw_gkcyx_ngkhw_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/merged_groups/device_grouped_conv2d_fwd_xdl_merged_groups_ngchw_gkcyx_ngkhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/merged_groups/device_grouped_conv2d_fwd_xdl_merged_groups_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/merged_groups/device_grouped_conv2d_fwd_xdl_merged_groups_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/merged_groups/device_grouped_conv2d_fwd_xdl_merged_groups_nhwgc_gkyxc_nhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd/xdl/merged_groups/device_grouped_conv2d_fwd_xdl_merged_groups_nhwgc_gkyxc_nhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/comp/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/comp/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/comp/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/comp/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/comp/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/comp/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/comp/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/large_tensor/device_grouped_conv2d_fwd_bias_clamp_xdl_large_tensor_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/large_tensor/device_grouped_conv2d_fwd_bias_clamp_xdl_large_tensor_nhwgc_gkyxc_nhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/large_tensor/device_grouped_conv2d_fwd_bias_clamp_xdl_large_tensor_nhwgc_gkyxc_nhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/mem/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/mem/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/mem/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/mem/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/mem/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/mem/device_grouped_conv2d_fwd_bias_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/merged_groups/device_grouped_conv2d_fwd_bias_clamp_xdl_merged_groups_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/merged_groups/device_grouped_conv2d_fwd_bias_clamp_xdl_merged_groups_nhwgc_gkyxc_nhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_bias_clamp/xdl/merged_groups/device_grouped_conv2d_fwd_bias_clamp_xdl_merged_groups_nhwgc_gkyxc_nhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/comp/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/comp/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/comp/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/comp/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/comp/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/comp/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/comp/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/large_tensor/device_grouped_conv2d_fwd_clamp_xdl_large_tensor_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/large_tensor/device_grouped_conv2d_fwd_clamp_xdl_large_tensor_nhwgc_gkyxc_nhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/large_tensor/device_grouped_conv2d_fwd_clamp_xdl_large_tensor_nhwgc_gkyxc_nhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/mem/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/mem/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_bf16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/mem/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/mem/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/mem/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/mem/device_grouped_conv2d_fwd_clamp_xdl_nhwgc_gkyxc_nhwgk_fp32_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/merged_groups/device_grouped_conv2d_fwd_clamp_xdl_merged_groups_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/merged_groups/device_grouped_conv2d_fwd_clamp_xdl_merged_groups_nhwgc_gkyxc_nhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_clamp/xdl/merged_groups/device_grouped_conv2d_fwd_clamp_xdl_merged_groups_nhwgc_gkyxc_nhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_dynamic_op/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_dynamic_op/xdl/device_grouped_conv2d_fwd_xdl_dynamic_op_nhwgc_gkyxc_nhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_dynamic_op/xdl/device_grouped_conv2d_fwd_xdl_dynamic_op_nhwgc_gkyxc_nhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_dynamic_op/xdl/device_grouped_conv2d_fwd_xdl_dynamic_op_nhwgc_gkyxc_nhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv2d_fwd_dynamic_op/xdl/device_grouped_conv2d_fwd_xdl_dynamic_op_nhwgc_gkyxc_nhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/wmma/device_grouped_conv3d_bwd_data_wmma_gndhwc_gkzyxc_gndhwk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/wmma/device_grouped_conv3d_bwd_data_wmma_gndhwc_gkzyxc_gndhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/wmma/device_grouped_conv3d_bwd_data_wmma_gndhwc_gkzyxc_gndhwk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/wmma/device_grouped_conv3d_bwd_data_wmma_gndhwc_gkzyxc_gndhwk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/wmma/device_grouped_conv3d_bwd_data_wmma_ndhwgc_gkzyxc_ndhwgk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/wmma/device_grouped_conv3d_bwd_data_wmma_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/wmma/device_grouped_conv3d_bwd_data_wmma_ndhwgc_gkzyxc_ndhwgk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/wmma/device_grouped_conv3d_bwd_data_wmma_ndhwgc_gkzyxc_ndhwgk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_gndhwc_gkzyxc_gndhwk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_gndhwc_gkzyxc_gndhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_gndhwc_gkzyxc_gndhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ndhwgc_gkzyxc_ndhwgk_f16_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ndhwgc_gkzyxc_ndhwgk_f32_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ndhwgc_gkzyxc_ndhwgk_input_f16_comp_bf8_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkczyx_ngkdhw_bf16_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkczyx_ngkdhw_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkczyx_ngkdhw_bf16_vec_transpose_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkczyx_ngkdhw_f16_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkczyx_ngkdhw_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkczyx_ngkdhw_f16_vec_transpose_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkczyx_ngkdhw_f32_16_16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkczyx_ngkdhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkczyx_ngkdhw_f32_vec_transpose_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkzyxc_ngkdhw_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkzyxc_ngkdhw_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data/xdl/device_grouped_conv3d_bwd_data_xdl_ngcdhw_gkzyxc_ngkdhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data_bilinear/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data_bilinear/xdl/device_grouped_conv3d_bwd_data_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data_bilinear/xdl/device_grouped_conv3d_bwd_data_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data_bilinear/xdl/device_grouped_conv3d_bwd_data_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data_scale/xdl/device_grouped_conv3d_bwd_data_xdl_scale_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data_scale/xdl/device_grouped_conv3d_bwd_data_xdl_scale_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_data_scale/xdl/device_grouped_conv3d_bwd_data_xdl_scale_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/dl/device_grouped_conv3d_bwd_weight_dl_gndhwc_gkzyxc_gndhwk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/dl/device_grouped_conv3d_bwd_weight_dl_gndhwc_gkzyxc_gndhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/dl/device_grouped_conv3d_bwd_weight_dl_gndhwc_gkzyxc_gndhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/dl/device_grouped_conv3d_bwd_weight_dl_ndhwgc_gkzyxc_ndhwgk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/dl/device_grouped_conv3d_bwd_weight_dl_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/dl/device_grouped_conv3d_bwd_weight_dl_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/wmma/device_grouped_conv3d_bwd_weight_wmma_gndhwc_gkzyxc_gndhwk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/wmma/device_grouped_conv3d_bwd_weight_wmma_gndhwc_gkzyxc_gndhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/wmma/device_grouped_conv3d_bwd_weight_wmma_gndhwc_gkzyxc_gndhwk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/wmma/device_grouped_conv3d_bwd_weight_wmma_gndhwc_gkzyxc_gndhwk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/wmma/device_grouped_conv3d_bwd_weight_wmma_ndhwgc_gkzyxc_ndhwgk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/wmma/device_grouped_conv3d_bwd_weight_wmma_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/wmma/device_grouped_conv3d_bwd_weight_wmma_ndhwgc_gkzyxc_ndhwgk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/wmma/device_grouped_conv3d_bwd_weight_wmma_ndhwgc_gkzyxc_ndhwgk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/gndhwc_gkzyxc_gndhwk/device_grouped_conv3d_bwd_weight_xdl_gndhwc_gkzyxc_gndhwk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/gndhwc_gkzyxc_gndhwk/device_grouped_conv3d_bwd_weight_xdl_gndhwc_gkzyxc_gndhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/gndhwc_gkzyxc_gndhwk/device_grouped_conv3d_bwd_weight_xdl_gndhwc_gkzyxc_gndhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_pipev1_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_pipev2_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_pipev5_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_f16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_f16_pipev1_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_f16_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_f16_pipev2_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_f16_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_two_stage_xdl_ndhwgc_gkzyxc_ndhwgk_f16_pipev5_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_default_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_default_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_f32_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_pad0_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_pad0_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f16_comp_bf8_fp8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f16_default_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f16_default_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f16_pad0_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f16_pad0_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f32_default_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f32_default_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f32_pad0_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ndhwgc_gkzyxc_ndhwgk/device_grouped_conv3d_bwd_weight_xdl_ndhwgc_gkzyxc_ndhwgk_f32_pad0_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkczyx_ngkdhw_bf16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkczyx_ngkdhw_bf16_pipev1_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkczyx_ngkdhw_bf16_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkczyx_ngkdhw_bf16_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkczyx_ngkdhw_f16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkczyx_ngkdhw_f16_pipev1_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkczyx_ngkdhw_f16_pipev2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkczyx_ngkdhw_f16_pipev5_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_xdl_ngcdhw_gkczyx_ngkdhw_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_xdl_ngcdhw_gkczyx_ngkdhw_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkczyx_ngkdhw/device_grouped_conv3d_bwd_weight_xdl_ngcdhw_gkczyx_ngkdhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkzyxc_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkzyxc_ngkdhw_bf16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkzyxc_ngkdhw/device_grouped_conv3d_bwd_weight_two_stage_xdl_ngcdhw_gkzyxc_ngkdhw_f16_pipev1_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight/xdl/ngcdhw_gkzyxc_ngkdhw/device_grouped_conv3d_bwd_weight_xdl_ngcdhw_gkzyxc_ngkdhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_bilinear/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_bilinear/xdl/device_grouped_conv3d_bwd_weight_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_bilinear/xdl/device_grouped_conv3d_bwd_weight_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_f16_comp_bf8_fp8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_bilinear/xdl/device_grouped_conv3d_bwd_weight_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_bilinear/xdl/device_grouped_conv3d_bwd_weight_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_scale/xdl/device_grouped_conv3d_bwd_weight_xdl_scale_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_scale/xdl/device_grouped_conv3d_bwd_weight_xdl_scale_ndhwgc_gkzyxc_ndhwgk_f16_comp_bf8_fp8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_scale/xdl/device_grouped_conv3d_bwd_weight_xdl_scale_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_bwd_weight_scale/xdl/device_grouped_conv3d_bwd_weight_xdl_scale_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_gndhwc_gkzyxc_gndhwk_f16_1x1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_gndhwc_gkzyxc_gndhwk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_gndhwc_gkzyxc_gndhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_gndhwc_gkzyxc_gndhwk_f16_oddc_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_gndhwc_gkzyxc_gndhwk_i8_1x1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_gndhwc_gkzyxc_gndhwk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_gndhwc_gkzyxc_gndhwk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_gndhwc_gkzyxc_gndhwk_i8_oddc_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_ndhwgc_gkzyxc_ndhwgk_f16_1x1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_ndhwgc_gkzyxc_ndhwgk_f16_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_ndhwgc_gkzyxc_ndhwgk_f16_oddc_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_ndhwgc_gkzyxc_ndhwgk_i8_1x1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_ndhwgc_gkzyxc_ndhwgk_i8_1x1s1p0_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_ndhwgc_gkzyxc_ndhwgk_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/wmma/device_grouped_conv3d_fwd_wmma_ndhwgc_gkzyxc_ndhwgk_i8_oddc_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_comp_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f16_comp_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f32_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_bf16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_bf16_comp_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_bf16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f16_comp_2x_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f16_comp_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f16_comp_part2_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/comp/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f32_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_gndhwc_gkzyxc_gndhwk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_gndhwc_gkzyxc_gndhwk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_gndhwc_gkzyxc_gndhwk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_gndhwc_gkzyxc_gndhwk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_bf8_fp8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_bf8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f16_comp_fp8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f32_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_fp8_bf8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_fp8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_bf16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_bf16_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f16_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f32_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/large_tensor/device_grouped_conv3d_fwd_xdl_large_tensor_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/large_tensor/device_grouped_conv3d_fwd_xdl_large_tensor_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/large_tensor/device_grouped_conv3d_fwd_xdl_large_tensor_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f32_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ndhwgc_gkzyxc_ndhwgk_f32_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_bf16_mem_inter_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_bf16_mem_intra_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f16_mem_inter_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f16_mem_intra_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f32_mem_inter_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/mem/device_grouped_conv3d_fwd_xdl_ngcdhw_gkczyx_ngkdhw_f32_mem_intra_instance.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/merged_groups/device_grouped_conv3d_fwd_xdl_merged_groups_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/merged_groups/device_grouped_conv3d_fwd_xdl_merged_groups_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/merged_groups/device_grouped_conv3d_fwd_xdl_merged_groups_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/merged_groups/device_grouped_conv3d_fwd_xdl_merged_groups_ngcdhw_gkczyx_ngkdhw_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/merged_groups/device_grouped_conv3d_fwd_xdl_merged_groups_ngcdhw_gkczyx_ngkdhw_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd/xdl/merged_groups/device_grouped_conv3d_fwd_xdl_merged_groups_ngcdhw_gkczyx_ngkdhw_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/comp/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/comp/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/comp/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/large_tensor/device_grouped_conv3d_fwd_bias_clamp_xdl_large_tensor_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/large_tensor/device_grouped_conv3d_fwd_bias_clamp_xdl_large_tensor_ndhwgc_gkzyxc_ndhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/large_tensor/device_grouped_conv3d_fwd_bias_clamp_xdl_large_tensor_ndhwgc_gkzyxc_ndhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/mem/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/mem/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/mem/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/mem/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/mem/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/mem/device_grouped_conv3d_fwd_bias_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/merged_groups/device_grouped_conv3d_fwd_bias_clamp_xdl_merged_groups_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/merged_groups/device_grouped_conv3d_fwd_bias_clamp_xdl_merged_groups_ndhwgc_gkzyxc_ndhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bias_clamp/xdl/merged_groups/device_grouped_conv3d_fwd_bias_clamp_xdl_merged_groups_ndhwgc_gkzyxc_ndhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bilinear/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bilinear/xdl/device_grouped_conv3d_fwd_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bilinear/xdl/device_grouped_conv3d_fwd_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bilinear/xdl/device_grouped_conv3d_fwd_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_bilinear/xdl/device_grouped_conv3d_fwd_xdl_bilinear_ndhwgc_gkzyxc_ndhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/comp/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/comp/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/comp/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_comp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_16x16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/large_tensor/device_grouped_conv3d_fwd_clamp_xdl_large_tensor_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/large_tensor/device_grouped_conv3d_fwd_clamp_xdl_large_tensor_ndhwgc_gkzyxc_ndhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/large_tensor/device_grouped_conv3d_fwd_clamp_xdl_large_tensor_ndhwgc_gkzyxc_ndhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/mem/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/mem/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_bf16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/mem/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/mem/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp16_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/mem/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_mem_inter_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/mem/device_grouped_conv3d_fwd_clamp_xdl_ndhwgc_gkzyxc_ndhwgk_fp32_mem_intra_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/merged_groups/device_grouped_conv3d_fwd_clamp_xdl_merged_groups_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/merged_groups/device_grouped_conv3d_fwd_clamp_xdl_merged_groups_ndhwgc_gkzyxc_ndhwgk_fp16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_clamp/xdl/merged_groups/device_grouped_conv3d_fwd_clamp_xdl_merged_groups_ndhwgc_gkzyxc_ndhwgk_fp32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convinvscale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convinvscale/xdl/device_grouped_conv3d_fwd_xdl_convinvscale_ndhwgc_gkzyxc_ndhwgk_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale/xdl/device_grouped_conv3d_fwd_xdl_combconvscale_ndhwgc_gkzyxc_ndhwgk_f8_f8_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale/xdl/device_grouped_conv3d_fwd_xdl_convscale_ndhwgc_gkzyxc_ndhwgk_bf8_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale/xdl/device_grouped_conv3d_fwd_xdl_convscale_ndhwgc_gkzyxc_ndhwgk_bf8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale/xdl/device_grouped_conv3d_fwd_xdl_convscale_ndhwgc_gkzyxc_ndhwgk_f8_bf8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale/xdl/device_grouped_conv3d_fwd_xdl_convscale_ndhwgc_gkzyxc_ndhwgk_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale_add/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale_add/xdl/device_grouped_conv3d_fwd_xdl_convscale_add_ndhwgc_gkzyxc_ndhwgk_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale_relu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale_relu/xdl/device_grouped_conv3d_fwd_xdl_combconvscale_relu_ndhwgc_gkzyxc_ndhwgk_f8_f8_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_convscale_relu/xdl/device_grouped_conv3d_fwd_xdl_convscale_relu_ndhwgc_gkzyxc_ndhwgk_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_dynamic_op/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_dynamic_op/xdl/device_grouped_conv3d_fwd_xdl_dynamic_op_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_dynamic_op/xdl/device_grouped_conv3d_fwd_xdl_dynamic_op_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_dynamic_op/xdl/device_grouped_conv3d_fwd_xdl_dynamic_op_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_dynamic_op/xdl/device_grouped_conv3d_fwd_xdl_dynamic_op_ndhwgc_gkzyxc_ndhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scale/xdl/device_grouped_conv3d_fwd_xdl_scale_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scale/xdl/device_grouped_conv3d_fwd_xdl_scale_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scale/xdl/device_grouped_conv3d_fwd_xdl_scale_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scale/xdl/device_grouped_conv3d_fwd_xdl_scale_ndhwgc_gkzyxc_ndhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_ab/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_ab/xdl/device_grouped_conv3d_fwd_xdl_scaleadd_ab_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_ab/xdl/device_grouped_conv3d_fwd_xdl_scaleadd_ab_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_ab/xdl/device_grouped_conv3d_fwd_xdl_scaleadd_ab_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_ab/xdl/device_grouped_conv3d_fwd_xdl_scaleadd_ab_ndhwgc_gkzyxc_ndhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_scaleadd_relu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_scaleadd_relu/xdl/device_grouped_conv3d_fwd_xdl_scaleadd_scaleadd_relu_ndhwgc_gkzyxc_ndhwgk_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_scaleadd_relu/xdl/device_grouped_conv3d_fwd_xdl_scaleadd_scaleadd_relu_ndhwgc_gkzyxc_ndhwgk_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_scaleadd_relu/xdl/device_grouped_conv3d_fwd_xdl_scaleadd_scaleadd_relu_ndhwgc_gkzyxc_ndhwgk_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_conv3d_fwd_scaleadd_scaleadd_relu/xdl/device_grouped_conv3d_fwd_xdl_scaleadd_scaleadd_relu_ndhwgc_gkzyxc_ndhwgk_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/bf16_bf16_bf16/device_grouped_convnd_bwd_weight_bf16_bf16_bf16_exp_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/bf16_bf16_bf16/device_grouped_convnd_bwd_weight_bf16_bf16_bf16_exp_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/bf16_bf16_bf16/device_grouped_convnd_bwd_weight_bf16_bf16_bf16_exp_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/bf16_bf16_bf16/device_grouped_convnd_bwd_weight_bf16_bf16_bf16_exp_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/bf16_bf16_bf16/device_grouped_convnd_bwd_weight_bf16_bf16_bf16_exp_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/bf16_bf16_bf16/device_grouped_convnd_bwd_weight_bf16_bf16_bf16_exp_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/bf16_bf16_bf16/device_grouped_convnd_bwd_weight_bf16_bf16_bf16_exp_odd_m_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/bf16_bf16_bf16/device_grouped_convnd_bwd_weight_bf16_bf16_bf16_exp_odd_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/bf16_bf16_bf16/device_grouped_convnd_bwd_weight_bf16_bf16_bf16_exp_odd_n_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/fp16_fp16_fp16/device_grouped_convnd_bwd_weight_f16_f16_f16_exp_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/fp16_fp16_fp16/device_grouped_convnd_bwd_weight_f16_f16_f16_exp_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/fp16_fp16_fp16/device_grouped_convnd_bwd_weight_f16_f16_f16_exp_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/fp16_fp16_fp16/device_grouped_convnd_bwd_weight_f16_f16_f16_exp_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/fp16_fp16_fp16/device_grouped_convnd_bwd_weight_f16_f16_f16_exp_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/fp16_fp16_fp16/device_grouped_convnd_bwd_weight_f16_f16_f16_exp_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/fp16_fp16_fp16/device_grouped_convnd_bwd_weight_f16_f16_f16_exp_odd_m_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/fp16_fp16_fp16/device_grouped_convnd_bwd_weight_f16_f16_f16_exp_odd_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_convnd_bwd_weight/explicit_xdl/fp16_fp16_fp16/device_grouped_convnd_bwd_weight_f16_f16_f16_exp_odd_n_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_multiple_d_splitk_xdl_two_stage_bf16_bf16_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_multiple_d_splitk_xdl_two_stage_bf16_bf16_bf16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_multiple_d_splitk_xdl_two_stage_bf16_i8_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_multiple_d_splitk_xdl_two_stage_bf16_i8_bf16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_multiple_d_splitk_xdl_two_stage_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_f16_f16_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_f16_f16_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_bf16_bf16_bf16_km_kn_mn_irregular_pv1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_bf16_bf16_bf16_km_kn_mn_irregular_pv1_inter.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_bf16_bf16_bf16_km_kn_mn_irregular_pv2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_bf16_bf16_bf16_mk_kn_mn_irregular_pv1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_bf16_bf16_bf16_mk_kn_mn_irregular_pv1_inter.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_bf16_bf16_bf16_mk_kn_mn_irregular_pv2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_bf16_bf16_bf16_mk_nk_mn_irregular_pv1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_bf16_bf16_bf16_mk_nk_mn_irregular_pv1_inter.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_bf16_bf16_bf16_mk_nk_mn_irregular_pv2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_irregular_pv1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_irregular_pv1_inter.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_f16_f16_f16_mk_kn_mn_irregular_pv2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_f16_f16_f16_mk_nk_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_f16_f8_f16_mk_kn_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm/device_grouped_gemm_xdl_splitk_f8_f16_f16_mk_kn_mn_irregular_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_bias/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_bias/device_grouped_gemm_xdl_fixed_nk_bias_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_bias/device_grouped_gemm_xdl_fixed_nk_bias_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_bias/device_grouped_gemm_xdl_fixed_nk_bias_f16_f16_f32_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_bias/device_grouped_gemm_xdl_fixed_nk_bias_f16_f16_f32_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fastgelu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fastgelu/device_grouped_gemm_fastgelu_xdl_f16_f16_f16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fastgelu/device_grouped_gemm_fastgelu_xdl_f16_f16_f16_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fastgelu/device_grouped_gemm_fastgelu_xdl_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fastgelu/device_grouped_gemm_fastgelu_xdl_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_bf16_bf16_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_bf16_bf16_bf16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_bf16_i8_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_bf16_i8_bf16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_f16_fp8_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_f16_fp8_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_f16_i8_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk/device_grouped_gemm_xdl_fixed_nk_f16_i8_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk_multi_abd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk_multi_abd/device_grouped_gemm_xdl_fixed_nk_bf16_i8_bf16_km_kn_mn_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk_multi_abd/device_grouped_gemm_xdl_fixed_nk_bf16_i8_bf16_mk_kn_mn_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk_multi_abd/device_grouped_gemm_xdl_fixed_nk_bf16_i8_bf16_mk_nk_mn_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk_multi_abd/device_grouped_gemm_xdl_fixed_nk_bias_gelu_bf16_i8_bf16_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk_multi_abd/device_grouped_gemm_xdl_fixed_nk_bias_gelu_bf16_i8_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_fixed_nk_multi_abd/device_grouped_gemm_xdl_fixed_nk_bias_gelu_bf16_i8_bf16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_f16_f16_f16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_f16_f16_f16_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_comp_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_comp_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_comp_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_comp_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_mem_v1_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_mem_v1_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_mem_v1_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_mem_v1_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_mem_v2_default_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_mem_v2_kpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_mem_v2_mnkpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bf16_i8_bf16_mk_kn_mn_mem_v2_mnpadding_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bias_bf16_i8_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_bias_fastgelu_bf16_i8_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/grouped_gemm_tile_loop/device_grouped_gemm_xdl_tile_loop_multiply_fastgelu_bf16_i8_bf16_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/image_to_column/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/image_to_column/device_image_to_column_gndhwc_3d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/image_to_column/device_image_to_column_gnhwc_2d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/image_to_column/device_image_to_column_gnwc_1d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/image_to_column/device_image_to_column_ndhwgc_3d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/image_to_column/device_image_to_column_nhwgc_2d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/image_to_column/device_image_to_column_nwgc_1d_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/max_pool_bwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/max_pool_bwd/device_max_pool_bwd_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/max_pool_bwd/device_max_pool_bwd_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/max_pool_bwd/device_max_pool_bwd_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/max_pool_bwd/device_max_pool_bwd_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/max_pool_bwd/device_max_pool_bwd_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/max_pool_bwd/max_pool_bwd_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/mha/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_data/device_groupnorm_bwd_data_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_data/device_layernorm2d_bwd_data_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_data/device_layernorm2d_bwd_data_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_data/normalization_bwd_data_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_gamma_beta/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_gamma_beta/device_groupnorm_bwd_gamma_beta_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_gamma_beta/device_layernorm2d_bwd_gamma_beta_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_gamma_beta/device_layernorm2d_bwd_gamma_beta_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_bwd_gamma_beta/normalization_bwd_gamma_beta_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/device_groupnorm_fwd_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/device_groupnorm_fwd_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/device_groupnorm_fwd_swish_f16_f32_f32_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/device_groupnorm_fwd_swish_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/device_groupnorm_fwd_swish_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/device_layernorm2d_fwd_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/device_layernorm2d_fwd_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/device_layernorm4d_fwd_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/device_layernorm4d_fwd_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/normalization_fwd/normalization_fwd_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_1d_fp16_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_1d_fp32_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_2d_fp16_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_2d_fp32_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_3d_fp16_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_3d_fp32_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_4d_fp16_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_4d_fp32_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_5d_fp16_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_5d_fp32_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_6d_fp16_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_6d_fp32_fp8_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/permute_scale/device_permute_scale_6d_fp32_instances.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_avg_pool2d_fwd_nhwc_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_avg_pool2d_fwd_nhwc_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_avg_pool2d_fwd_nhwc_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_avg_pool2d_fwd_nhwc_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_avg_pool2d_fwd_nhwc_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_max_pool2d_fwd_nhwc_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_max_pool2d_fwd_nhwc_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_max_pool2d_fwd_nhwc_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_max_pool2d_fwd_nhwc_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/device_max_pool2d_fwd_nhwc_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool2d_fwd/pool2d_fwd_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_avg_pool3d_fwd_ndhwc_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_avg_pool3d_fwd_ndhwc_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_avg_pool3d_fwd_ndhwc_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_avg_pool3d_fwd_ndhwc_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_avg_pool3d_fwd_ndhwc_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_max_pool3d_fwd_ndhwc_bf16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_max_pool3d_fwd_ndhwc_f16_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_max_pool3d_fwd_ndhwc_f32_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_max_pool3d_fwd_ndhwc_f8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/device_max_pool3d_fwd_ndhwc_i8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/pool3d_fwd/pool_fwd_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/conv2d_quantization_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_dl_bias_perchannel_quantization_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_dl_bias_perlayer_quantization_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_dl_int8_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_dl_perchannel_quantization_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_dl_perlayer_quantization_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_xdl_bias_perchannel_quantization_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_xdl_bias_perlayer_quantization_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_xdl_int8_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_xdl_perchannel_quantization_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/conv2d_fwd/device_conv2d_xdl_perlayer_quantization_int8_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_dl_c_shuffle_i8_i8_i8_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_dl_c_shuffle_i8_i8_i8_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_dl_c_shuffle_i8_i8_i8_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_dl_c_shuffle_i8_i8_i8_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_dl_c_shuffle_i8_i8_i8_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_xdl_c_shuffle_i8_i8_i8_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_xdl_c_shuffle_i8_i8_i8_km_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_xdl_c_shuffle_i8_i8_i8_km_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_xdl_c_shuffle_i8_i8_i8_mk_kn_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/device_gemm_quantization_xdl_c_shuffle_i8_i8_i8_mk_nk_mn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/quantization/gemm/gemm_quantization_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_b16_f32_b16_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f16_f16_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f16_f16_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f16_f16_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f32_f16_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f32_f16_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f16_f32_f16_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f32_f32_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f64_f32_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f64_f32_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f32_f64_f32_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_f64_f64_f64_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i32_i8_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i32_i8_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i8_i8_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i8_i8_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_blockwise_i8_i8_i8_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_b16_f32_f32_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_b16_f32_f32_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f16_f32_f32_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f16_f32_f32_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f32_f32_f32_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f32_f32_f32_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f32_f64_f32_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f32_f64_f32_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f64_f64_f64_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_multiblock_atomic_add_f64_f64_f64_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_b16_f32_b16_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f16_f16_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f16_f16_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f16_f16_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f32_f16_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f32_f16_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f16_f32_f16_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f32_f32_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f64_f32_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f64_f32_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f32_f64_f32_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_f64_f64_f64_norm2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i32_i8_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i32_i8_avg.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i8_i8_amax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i8_i8_max.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/reduce/device_reduce_instance_threadwise_i8_i8_i8_min.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank3_reduce1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank3_reduce2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank3_reduce3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank4_reduce1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank4_reduce2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank4_reduce3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f16_f16_instance_rank4_reduce4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank3_reduce1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank3_reduce2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank3_reduce3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank4_reduce1.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank4_reduce2.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank4_reduce3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/softmax/device_softmax_f32_f32_instance_rank4_reduce4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/transpose/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/tensor_operation_instance/gpu/transpose/device_transpose_instances_3d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/utility/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/utility/convolution_parameter.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/utility/device_memory.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/library/src/utility/host_tensor.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/data_type_enum.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_avg_pool2d_bwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_avg_pool3d_bwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batched_gemm_add_relu_gemm_add_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batched_gemm_b_scale_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batched_gemm_bias_softmax_gemm_permute_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batched_gemm_gemm_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batched_gemm_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batched_gemm_reduce_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batched_gemm_softmax_gemm_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batched_gemm_softmax_gemm_permute_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batchnorm_backward_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batchnorm_forward_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_batchnorm_infer_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_contraction_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_contraction_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_conv_bwd_data_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_conv_fwd_bias_relu_add_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_conv_fwd_bias_relu_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_conv_fwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_conv_tensor_rearrange_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_elementwise_layernorm_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_ab_scale_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_add_add_fastgelu_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_add_fastgelu_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_add_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_add_multiply_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_add_relu_add_layernorm_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_add_relu_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_add_silu_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_b_scale_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_bias_add_reduce_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_bilinear_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_blockscale_wp_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_fastgelu_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_multiply_add_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_multiply_multiply_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_multiply_multiply_wp_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_mx_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_reduce_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_splitk_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_streamk_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_universal_batched_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_universal_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_universal_preshuffle_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_universal_reduce_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_gemm_universal_streamk_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_conv_bwd_data_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_conv_bwd_weight_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_conv_fwd_bias_clamp_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_conv_fwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_conv_fwd_outelementop_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_gemm_fastgelu_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_gemm_fixed_nk_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_gemm_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_gemm_multiply_tile_loop_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_grouped_gemm_tile_loop_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_groupnorm_bwd_data_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_groupnorm_bwd_gamma_beta_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_groupnorm_fwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_layernorm_bwd_data_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_layernorm_bwd_gamma_beta_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_layernorm_fwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_max_pool2d_bwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_max_pool3d_bwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_permute_scale_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_pool2d_fwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_pool3d_fwd_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_reduce_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_softmax_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/include/profiler/profile_transpose_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_avg_pool2d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_avg_pool3d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_batched_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_batched_gemm_add_relu_gemm_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_batched_gemm_b_scale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_batched_gemm_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_batched_gemm_multi_d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_batched_gemm_reduce.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_batchnorm_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_batchnorm_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_batchnorm_infer.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_contraction_bilinear.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_contraction_scale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_conv_bwd_data.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_conv_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_conv_fwd_bias_relu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_conv_fwd_bias_relu_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_conv_tensor_rearrange.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_ab_scale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_add_add_fastgelu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_add_fastgelu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_add_multiply.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_add_relu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_add_relu_add_layernorm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_add_silu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_b_scale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_bias_add_reduce.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_bilinear.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_blockscale_wp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_fastgelu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_multiply_add.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_multiply_multiply.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_multiply_multiply_wp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_mx.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_reduce.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_splitk.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_streamk.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_universal.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_universal_batched.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_universal_preshuffle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_universal_reduce.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_gemm_universal_streamk.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_conv_bwd_data.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_conv_bwd_weight.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_conv_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_conv_fwd_bias_clamp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_conv_fwd_clamp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_conv_fwd_outelementop.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_gemm_fastgelu.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_gemm_fixed_nk.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_gemm_multiply_tile_loop.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_grouped_gemm_tile_loop.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_groupnorm_bwd_data.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_groupnorm_bwd_gamma_beta.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_groupnorm_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_layernorm_bwd_data.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_layernorm_bwd_gamma_beta.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_layernorm_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_max_pool2d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_max_pool2d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_max_pool3d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_permute_scale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_pool3d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_reduce.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_softmax.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profile_transpose.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profiler.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/profiler/src/profiler_operation_registry.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/pyproject.toml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/python/ck4inductor/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/python/ck4inductor/batched_universal_gemm/gen_instances.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/python/ck4inductor/batched_universal_gemm/op.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/python/ck4inductor/grouped_conv_fwd/gen_instances.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/python/ck4inductor/grouped_conv_fwd/op.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/python/ck4inductor/universal_gemm/gen_instances.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/python/ck4inductor/universal_gemm/op.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/python/ck4inductor/util.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/python/test/test_gen_instances.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/rbuild.ini +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/requirements.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/check_copyright_year.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/clang-format-overwrite.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/cmake-ck-dev.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/cmake-ck-release.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/convert_miopen_driver_to_profiler.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/count_vgpr.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/dependency-parser/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/dependency-parser/main.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/dependency-parser/src/enhanced_ninja_parser.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/dependency-parser/src/selective_test_filter.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/hip_fatbin_insert +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/hipclang_opt.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/install_precommit.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/launch_tests.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/ninja_json_converter.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/process_perf_data.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/process_perf_data.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/process_qa_data.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_batched_gemm.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_gemm.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_gemm_bilinear.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_grouped_conv_bwd_data.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_grouped_conv_bwd_weight.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_grouped_conv_fwd.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_grouped_conv_fwd_outelementop.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_grouped_gemm.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_mixed_gemm.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_onnx_gemm.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_permute_scale.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_reduce_no_index.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_reduce_with_index.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_resnet50.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/profile_splitK_gemm.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/redis-cli.conf +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/remod_for_ck_tile.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/remove_exec_bit.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/run_ck_profiler_gemm_with_csv_shapes.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/run_full_performance_tests.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/run_gemm_performance_tests.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/run_performance_tests.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/sccache_wrapper.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/test_convnd_fwd.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/test_reduce_no_index.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/script/uninstall_precommit.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm/test_batched_gemm_wmma.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm/test_batched_gemm_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_gemm/test_batched_gemm_gemm_fp16_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_gemm/test_batched_gemm_gemm_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_multi_d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_multi_d/test_batched_gemm_multi_d_dl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_reduce/batched_gemm_reduce_fp16_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm/test_batched_gemm_softmax_gemm_fp16_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm/test_batched_gemm_softmax_gemm_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm_permute/test_batched_gemm_bias_softmax_gemm_permute_bf16_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm_permute/test_batched_gemm_bias_softmax_gemm_permute_fp16_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm_permute/test_batched_gemm_bias_softmax_gemm_permute_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm_permute/test_batched_gemm_device_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm_permute/test_batched_gemm_softmax_gemm_permute_bf16_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm_permute/test_batched_gemm_softmax_gemm_permute_fp16_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batched_gemm_softmax_gemm_permute/test_batched_gemm_softmax_gemm_permute_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batchnorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batchnorm/batchnorm_bwd_rank_4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batchnorm/batchnorm_fwd_rank_4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/batchnorm/batchnorm_infer_rank_4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/block_swizzle_test/block_swizzle_test.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/block_swizzle_test/rebuild.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/block_swizzle_test/simple_args.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/block_to_ctile_map/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/block_to_ctile_map/test_block_to_ctile_map.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/add_rmsnorm2d_rdquant_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/add_rmsnorm2d_rdquant_fwd.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/add_rmsnorm2d_rdquant_fwd_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/add_rmsnorm2d_rdquant_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n8192_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_bf16_n8192_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n8192_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_fp16_n8192_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/add_rmsnorm2d_rdquant/instances/add_rmsnorm2d_rdquant_fwd_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_gemm/test_batched_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_gemm/test_batched_gemm_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_gemm/test_batched_gemm_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_transpose/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_transpose/batched_transpose.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_transpose/batched_transpose.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_transpose/batched_transpose_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_transpose/batched_transpose_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_transpose/batched_transpose_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_transpose/batched_transpose_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/batched_transpose/test_batched_transpose.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/container/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/container/test_tuple_apply.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/data_type/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/data_type/test_pk_fp4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/data_type/test_pk_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/elementwise/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/elementwise/test_elementwise_1d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_basic_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_basic_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_basic_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_basic_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_basic_run_test.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_compv3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_compv4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_kernel_types.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_mem.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_persistent.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_smoke_run_test.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_smoke_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_universal_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_universal_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_universal_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_universal_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_universal_run_test.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm/test_gemm_pipeline_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_block_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_block_scale/test_gemm_aquant_basic_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_block_scale/test_gemm_aquant_basic_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_block_scale/test_gemm_aquant_basic_i4bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_block_scale/test_gemm_aquant_basic_i4f32bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_block_scale/test_gemm_aquant_basic_i4f32fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_block_scale/test_gemm_aquant_basic_i4fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_block_scale/test_gemm_aquant_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_block_scale/test_run_gemm_aquant_example.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_multi_d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_multi_d/test_gemm_multi_d.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_multi_d/test_gemm_multi_d_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_multi_d/test_gemm_multi_d_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_weight_preshuffle/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_weight_preshuffle/test_gemm_pipeline_kernel_types.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_weight_preshuffle/test_gemm_pipeline_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_weight_preshuffle/test_gemm_pipeline_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/gemm_weight_preshuffle/test_gemm_pipeline_wp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/grouped_gemm/test_grouped_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/grouped_gemm/test_grouped_gemm_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/grouped_gemm/test_grouped_gemm_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/image_to_column/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/image_to_column/test_tile_image_to_column.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/layernorm2d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/layernorm2d/generate.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/layernorm2d/layernorm2d_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/layernorm2d/layernorm2d_fwd.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/layernorm2d/layernorm2d_fwd_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/layernorm2d/layernorm2d_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/memory_copy/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/memory_copy/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/memory_copy/test_copy.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/memory_copy/test_copy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n4096_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_bf16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n4096_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fp16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_fwd_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/instances/moe_smoothquant_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/moe_smoothquant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/moe_smoothquant.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/moe_smoothquant_bf16_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/moe_smoothquant_bf16_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/moe_smoothquant_fp16_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_smoothquant/moe_smoothquant_fp16_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_sorting/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_sorting/moe_sorting_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_sorting/moe_sorting_api.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/moe_sorting/moe_sorting_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/permute/alternative_impl/matrix_core_swizzle.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/permute/alternative_impl/matrix_core_swizzle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/permute/alternative_impl/matrix_core_swizzle_kernel.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/permute/permute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/permute/permute_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/permute/permute_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/permute/permute_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/permute/permute_utils.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/rmsnorm2d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/rmsnorm2d/generate.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/rmsnorm2d/rmsnorm2d_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/rmsnorm2d/rmsnorm2d_fwd.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/rmsnorm2d/rmsnorm2d_fwd_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/rmsnorm2d/rmsnorm2d_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/slice_tile/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/slice_tile/test_slice_tile.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n4096_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_bf16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n1024_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n1536_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n2048_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n256_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n3072_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n4096_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n4096_tp_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n512_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n64_n128_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fp16_n768_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_fwd_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/instances/smoothquant_instance_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/smoothquant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/smoothquant.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/smoothquant_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/smoothquant/smoothquant_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/topk_softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/topk_softmax/test_topk_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/topk_softmax/test_topk_softmax_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/topk_softmax/test_topk_softmax_api.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/topk_softmax/test_topk_softmax_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/ck_tile/topk_softmax/test_topk_softmax_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/contraction/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/contraction/test_contraction_interface_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/contraction/test_contraction_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/conv_tensor_rearrange/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/conv_tensor_rearrange/test_conv_tensor_rearrange.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/conv_tensor_rearrange/test_conv_tensor_rearrange_interface.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/conv_util/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/conv_util/conv_util.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/convnd_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/convnd_bwd_data/convnd_bwd_data_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/convnd_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/convnd_fwd/convnd_fwd_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_bf6.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_bf8_fnuz.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_bf8_ocp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_bhalf.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_custom_type.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_e8m0.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_fp4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_fp6.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_fp8_fnuz.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_fp8_ocp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_int4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_mx_bf8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_mx_fp4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_mx_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/test_pk_i4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/data_type/type_convert_const.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/elementwise_normalization/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/elementwise_normalization/test_elementwise_layernorm_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/gemm_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/gemm_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/gemm_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/gemm_fp64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/gemm_int8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/gemm_standalone_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/gemm_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_f16_nn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_f16_nn_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_f16_nt_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_f16_nt_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_f16_tn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_f16_tn_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_f16_tt_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_f16_tt_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_wavelet_f16_tn_instance.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/instance/gemm_wavelet_f16_tn_instance.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm/run_gemm_test.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_add/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_add/test_gemm_add_fastgelu_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_add/test_gemm_add_relu_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_add/test_gemm_add_silu_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_add/test_gemm_add_xdl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_b_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_b_scale/test_gemm_b_scale_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_b_scale/test_gemm_b_scale_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_b_scale/test_gemm_b_scale_wmma.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_b_scale/test_gemm_b_scale_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_layernorm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_layernorm/test_gemm_add_relu_add_layernorm_fp16_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_mx/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_mx/test_gemm_mx.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_mx/test_gemm_mx_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_reduce/gemm_reduce_fp16_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_split_k/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_split_k/test_gemm_splitk_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_split_k/test_gemm_splitk_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_split_k/test_gemm_splitk_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_ut_cases_bf16.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_ut_cases_fp16.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_ut_cases_fp8.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_wmma_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_wmma_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_wmma_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal/test_gemm_universal_xdl_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal_streamk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal_streamk/test_gemm_universal_streamk_ut_cases_bf16.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal_streamk/test_gemm_universal_streamk_ut_cases_fp16.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal_streamk/test_gemm_universal_streamk_ut_cases_fp8.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal_streamk/test_gemm_universal_streamk_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal_streamk/test_gemm_universal_streamk_xdl_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal_streamk/test_gemm_universal_streamk_xdl_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/gemm_universal_streamk/test_gemm_universal_streamk_xdl_fp8.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_data/test_grouped_convnd_bwd_data_interface_wmma.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_data/test_grouped_convnd_bwd_data_interface_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_data/test_grouped_convnd_bwd_data_wmma.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_data/test_grouped_convnd_bwd_data_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_data/test_grouped_convnd_bwd_data_xdl_large_cases.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_weight/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_weight/test_grouped_conv_bwd_weight_xdl_bilinear.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_weight/test_grouped_convnd_bwd_weight.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_weight/test_grouped_convnd_bwd_weight_interface_wmma.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_weight/test_grouped_convnd_bwd_weight_interface_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_bwd_weight/test_grouped_convnd_bwd_weight_v3_interface_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd/test_grouped_convnd_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd/test_grouped_convnd_fwd_large_cases_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd/test_grouped_convnd_fwd_multi_ab_interface.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd/test_grouped_convnd_fwd_multi_d_interface_compatibility_xdl_wmma.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd_activation/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd_activation/test_grouped_convnd_fwd_bias_clamp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd_activation/test_grouped_convnd_fwd_bias_clamp_large_cases.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd_activation/test_grouped_convnd_fwd_clamp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_convnd_fwd_activation/test_grouped_convnd_fwd_gk_bias_clamp.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_gemm/test_grouped_gemm_interface_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_gemm/test_grouped_gemm_splitk_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_gemm/test_grouped_gemm_two_stage_multiple_d_splitk_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_gemm/test_grouped_gemm_two_stage_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_gemm/test_grouped_gemm_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/grouped_gemm/test_grouped_gemm_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/magic_number_division/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/magic_number_division/magic_number_division.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/mx_mfma_op/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/mx_mfma_op/mx_mfma_op.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/mx_mfma_op/mx_mfma_op.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_bwd_data/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_bwd_data/test_groupnorm_bwd_data_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_bwd_data/test_layernorm2d_bwd_data_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_bwd_gamma_beta/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_bwd_gamma_beta/test_groupnorm_bwd_gamma_beta_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_bwd_gamma_beta/test_layernorm2d_bwd_gamma_beta_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_fwd/test_groupnorm_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_fwd/test_groupnorm_fwd_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_fwd/test_layernorm2d_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_fwd/test_layernorm2d_fwd_fp32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/normalization_fwd/test_layernorm4d_fwd_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/permute_scale/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/permute_scale/test_permute_scale.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/test_avg_pool2d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/test_avg_pool2d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/test_avg_pool3d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/test_avg_pool3d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/test_max_pool2d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/test_max_pool2d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/test_max_pool3d_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/test_max_pool3d_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/pool/test_pool_fwd_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/position_embedding/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/position_embedding/position_embedding.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/reduce/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/reduce/reduce_no_index.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/reduce/reduce_with_index.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/reference_conv_fwd/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/reference_conv_fwd/reference_conv_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/scatter_gather/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/scatter_gather/scatter_gather.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/smfmac_op/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/smfmac_op/smfmac_op.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/smfmac_op/smfmac_op_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/smfmac_op/smfmac_op_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/softmax/test_softmax_interface.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/softmax/test_softmax_rank3.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/softmax/test_softmax_rank4.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/softmax/test_softmax_ut_cases.inc +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/softmax/test_softmax_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/space_filling_curve/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/space_filling_curve/space_filling_curve.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/transpose/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/transpose/test_transpose_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/wmma_op/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/wmma_op/wmma_op.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/wmma_op/wmma_op_util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/wrapper/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/wrapper/test_wrapper_copy.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/wrapper/test_wrapper_gemm_xdl.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/wrapper/test_wrapper_layout.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/wrapper/test_wrapper_partition.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/test/wrapper/test_wrapper_tensor.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/include/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/benchmark_gemm.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/benchmark_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/codegen_utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/configs/benchmark.json +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/configs/custom_ci_config.json +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/configs/default_config.json +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/configs/user_provided_config.json +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/gemm_host_api.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/gemm_instance_builder.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/gemm_profiler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/composable_kernel/tile_engine/ops/gemm/json_config.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.git +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/ISSUE_TEMPLATE/bug_report.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/ISSUE_TEMPLATE/config.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/ISSUE_TEMPLATE/documentation_request.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/ISSUE_TEMPLATE/feature_request.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/ISSUE_TEMPLATE/submit_question.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/workflows/auto-label-issues.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/workflows/blossom-ci.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/workflows/labeler.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/workflows/new-issues-to-triage-projects.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.github/workflows/stale.yml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.gitignore +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/.gitmodules +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/CHANGELOG.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/CITATION.cff +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/CONTRIBUTORS.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/CUDA.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/Doxyfile +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/EULA.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/LICENSE.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/PUBLICATIONS.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/bin2hex.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/cmake/CTestTestfile.configure.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/cmake/CTestTestfile.test.configure.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/cmake/NvidiaCutlassConfig.cmake.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/cmake/NvidiaCutlassPackageConfig.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/cmake/googletest.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/cmake/nop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/cmake/version_extended.h.in +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/cuBLAS.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/cuDNN.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/customConfigs.cmake +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/00_basic_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/00_basic_gemm/basic_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/01_cutlass_utilities/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/01_cutlass_utilities/cutlass_utilities.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/02_dump_reg_shmem/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/02_dump_reg_shmem/dump_reg_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/03_visualize_layout/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/03_visualize_layout/options.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/03_visualize_layout/register_layout.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/03_visualize_layout/register_layout.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/03_visualize_layout/visualize_layout.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/03_visualize_layout/visualize_layout.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/04_tile_iterator/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/04_tile_iterator/tile_iterator.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/05_batched_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/05_batched_gemm/batched_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/06_splitK_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/06_splitK_gemm/splitk_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/07_volta_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/07_volta_tensorop_gemm/volta_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/08_turing_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/08_turing_tensorop_gemm/turing_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/09_turing_tensorop_conv2dfprop/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/09_turing_tensorop_conv2dfprop/turing_tensorop_conv2dfprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/10_planar_complex/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/10_planar_complex/planar_complex.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/11_planar_complex_array/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/11_planar_complex_array/planar_complex_array.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/12_gemm_bias_relu/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/12_gemm_bias_relu/gemm_bias_relu.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/b2b_conv2d_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/b2b_gemm_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/b2b_grouped_gemm_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/b2b_interleaved_conv2d_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/b2b_interleaved_gemm_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/device/b2b_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/device/b2b_implicit_gemm_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_f16_sm75_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_f16_sm75_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_f16_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_f16_sm80_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_s8_sm75_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_s8_sm75_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_s8_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_convs_s8_sm80_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_f16_sm75_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_f16_sm75_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_f16_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_f16_sm80_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_grouped_f16_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_s8_sm75_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_s8_sm75_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_s8_sm80_rf.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/fused_two_gemms_s8_sm80_shmem.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/b2b_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/b2b_gemm_grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/b2b_implicit_gemm_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop_smem_accumulator_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_conv2d_fprop_smem_accumulator_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/default_b2b_gemm_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/kernel/grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/reference/device/tensor_scale_bias.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/test_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_implicit_gemm_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_implicit_gemm_multistage_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_implicit_gemm_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_implicit_gemm_pipelined_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_base_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_multistage_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/b2b_mma_pipelined_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/default_b2b_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/default_b2b_mma_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/13_two_tensor_op_fusion/threadblock/grouped_threadblock_swizzle.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/14_ampere_tf32_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/14_ampere_tf32_tensorop_gemm/ampere_tf32_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/15_ampere_sparse_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/15_ampere_sparse_tensorop_gemm/ampere_sparse_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/15_ampere_sparse_tensorop_gemm/ampere_sparse_tensorop_gemm_universal.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/15_ampere_sparse_tensorop_gemm/ampere_sparse_tensorop_gemm_with_visitor.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/16_ampere_tensorop_conv2dfprop/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/16_ampere_tensorop_conv2dfprop/ampere_tensorop_conv2dfprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/17_fprop_per_channel_bias/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/17_fprop_per_channel_bias/fprop_per_channel_bias.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/18_ampere_fp64_tensorop_affine2_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/18_ampere_fp64_tensorop_affine2_gemm/ampere_fp64_tensorop_affine2_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/19_tensorop_canonical/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/19_tensorop_canonical/tensorop_canonical.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/20_simt_canonical/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/20_simt_canonical/simt_canonical.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/21_quaternion_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/21_quaternion_gemm/quaternion_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/22_quaternion_conv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/22_quaternion_conv/quaternion_conv.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/23_ampere_gemm_operand_reduction_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/23_ampere_gemm_operand_reduction_fusion/ampere_gemm_operand_reduction_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/24_gemm_grouped/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/24_gemm_grouped/gemm_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/25_ampere_fprop_mainloop_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/25_ampere_fprop_mainloop_fusion/ampere_3d_fprop_mainloop_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/25_ampere_fprop_mainloop_fusion/ampere_fprop_mainloop_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/26_ampere_wgrad_mainloop_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/26_ampere_wgrad_mainloop_fusion/ampere_wgrad_mainloop_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/27_ampere_3xtf32_fast_accurate_tensorop_gemm/27_ampere_3xtf32_fast_accurate_tensorop_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/27_ampere_3xtf32_fast_accurate_tensorop_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/28_ampere_3xtf32_fast_accurate_tensorop_fprop/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/28_ampere_3xtf32_fast_accurate_tensorop_fprop/ampere_3xtf32_fast_accurate_tensorop_fprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/29_ampere_3xtf32_fast_accurate_tensorop_complex_gemm/29_3xtf32_complex_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/29_ampere_3xtf32_fast_accurate_tensorop_complex_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/30_wgrad_split_k/30_wgrad_split_k.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/30_wgrad_split_k/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/31_basic_syrk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/31_basic_syrk/basic_syrk.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/32_basic_trmm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/32_basic_trmm/basic_trmm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/33_ampere_3xtf32_tensorop_symm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/33_ampere_3xtf32_tensorop_symm/ampere_3xtf32_tensorop_symm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/34_transposed_conv2d/34_transposed_conv2d.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/34_transposed_conv2d/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/35_gemm_softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/35_gemm_softmax/gemm_softmax.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/35_gemm_softmax/gemm_with_epilogue_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/35_gemm_softmax/gemm_with_softmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/36_gather_scatter_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/36_gather_scatter_fusion/gather_scatter_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/37_gemm_layernorm_gemm_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/37_gemm_layernorm_gemm_fusion/gemm_layernorm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/37_gemm_layernorm_gemm_fusion/gemm_with_epilogue_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/37_gemm_layernorm_gemm_fusion/gemm_with_layernorm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/38_syr2k_grouped/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/38_syr2k_grouped/syr2k_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/39_gemm_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/39_gemm_permute/gemm_permute.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/39_gemm_permute/layouts.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/39_gemm_permute/permute_info.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/40_cutlass_py/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/40_cutlass_py/conv2d.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/40_cutlass_py/customizable/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/40_cutlass_py/customizable/conv2d.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/40_cutlass_py/customizable/gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/40_cutlass_py/customizable/gemm_grouped.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/40_cutlass_py/customizable/grouped_gemm_problem_size.csv +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/40_cutlass_py/gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/40_cutlass_py/gemm_grouped.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/debug_utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/default_fmha_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/epilogue/epilogue_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/epilogue/epilogue_rescale_output.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/epilogue/epilogue_thread_apply_logsumexp.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/fmha_backward_test.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/fmha_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/fmha_grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/fused_multi_head_attention_backward.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/fused_multihead_attention_fixed_seqlen.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/fused_multihead_attention_variable_seqlen.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/gemm/custom_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/gemm/custom_mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/gemm/custom_mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/gemm/custom_mma_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/gemm/find_default_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/gemm/mma_accum_lambda_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/gemm/mma_from_smem.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/gemm_kernel_utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/iterators/default_warp_iterator_from_smem.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/iterators/epilogue_predicated_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/iterators/make_residual_last.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/iterators/predicated_tile_access_iterator_residual_last.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/iterators/predicated_tile_iterator_residual_last.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/iterators/transpose_warp_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/iterators/warp_iterator_from_smem.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/kernel_backward.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/kernel_forward.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/piped_subprocess.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/41_fused_multi_head_attention/transform/tile_smem_loader.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/42_ampere_tensorop_group_conv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/42_ampere_tensorop_group_conv/ampere_tensorop_group_conv.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/43_ell_block_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/43_ell_block_sparse_gemm/ell_block_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/config.json +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/threadblock/default_bias_act_epilogue_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/threadblock/default_thread_map_tensor_op_for_fused_bias.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/threadblock/fused_bias_act_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/threadblock/output_tile_thread_map_for_fused_bias.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/epilogue/warp/fused_bias_act_fragment_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/fixed_impl/gemm/warp/mma_tensor_op_fragment_iterator_without_output_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_all_code.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_cmake.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_customized_epilogue.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_device.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_ir.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_kernel.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_sample.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_threadblock.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_turing_and_volta.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/gen_verify.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/generate.sh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/helper.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/ir_gen/replace_fix_impl_header.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/leaky_bias.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/44_multi_gemm_ir_and_codegen/utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/device/dual_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/dual_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/dual_gemm_common.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/dual_gemm_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/kernel/dual_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/test_run.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/thread/left_silu_and_mul.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/threadblock/dual_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/threadblock/dual_mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/45_dual_gemm/threadblock/dual_mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/46_depthwise_simt_conv2dfprop/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/46_depthwise_simt_conv2dfprop/depthwise_simt_conv2dfprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/47_ampere_gemm_universal_streamk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/47_ampere_gemm_universal_streamk/ampere_gemm_universal_streamk.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/47_ampere_gemm_universal_streamk/ampere_gemm_universal_streamk_broadcast.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/48_hopper_warp_specialized_gemm/48_hopper_warp_specialized_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/48_hopper_warp_specialized_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/49_hopper_gemm_with_collective_builder/49_collective_builder.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/49_hopper_gemm_with_collective_builder/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/50_hopper_gemm_with_epilogue_swizzle/50_hopper_gemm_with_epilogue_swizzle.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/50_hopper_gemm_with_epilogue_swizzle/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/51_hopper_gett/51_hopper_gett.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/51_hopper_gett/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/51_hopper_gett/gett_kernel.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/52_hopper_gather_scatter_fusion/52_hopper_gather_scatter_fusion.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/52_hopper_gather_scatter_fusion/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/52_hopper_gather_scatter_fusion/gather_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/52_hopper_gather_scatter_fusion/gather_kernel.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/52_hopper_gather_scatter_fusion/scatter_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/53_hopper_gemm_permute/53_hopper_gemm_permute.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/53_hopper_gemm_permute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/53_hopper_gemm_permute/permute_kernel.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/53_hopper_gemm_permute/permute_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/54_hopper_fp8_warp_specialized_gemm/54_hopper_fp8_warp_specialized_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/54_hopper_fp8_warp_specialized_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/54_hopper_fp8_warp_specialized_gemm/hopper_fp8_commandline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/55_hopper_mixed_dtype_gemm/55_hopper_int4_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/55_hopper_mixed_dtype_gemm/55_hopper_int4_fp8_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/55_hopper_mixed_dtype_gemm/55_hopper_mixed_dtype_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/55_hopper_mixed_dtype_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/55_hopper_mixed_dtype_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/55_hopper_mixed_dtype_gemm/mixed_dtype_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/56_hopper_ptr_array_batched_gemm/56_hopper_ptr_array_batched_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/56_hopper_ptr_array_batched_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/57_hopper_grouped_gemm/57_hopper_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/57_hopper_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/58_ada_fp8_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/58_ada_fp8_gemm/ada_fp8_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/59_ampere_gather_scatter_conv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/59_ampere_gather_scatter_conv/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/59_ampere_gather_scatter_conv/ampere_conv_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/59_ampere_gather_scatter_conv/ampere_gather_scatter_conv.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/60_cutlass_import/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/60_cutlass_import/main.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/61_hopper_gemm_with_topk_and_softmax/61_hopper_gemm_with_topk_and_softmax.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/61_hopper_gemm_with_topk_and_softmax/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/62_hopper_sparse_gemm/62_hopper_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/62_hopper_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/63_hopper_gemm_with_weight_prefetch/63_hopper_gemm_with_weight_prefetch.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/63_hopper_gemm_with_weight_prefetch/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/63_hopper_gemm_with_weight_prefetch/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/63_hopper_gemm_with_weight_prefetch/collective/builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/63_hopper_gemm_with_weight_prefetch/collective/dispatch_policy_extra.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/63_hopper_gemm_with_weight_prefetch/collective/sm90_mma_tma_gmma_ss_warpspecialized_with_prefetch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/63_hopper_gemm_with_weight_prefetch/gemm_with_weight_prefetch_commandline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/63_hopper_gemm_with_weight_prefetch/kernel/sm90_gemm_tma_warpspecialized_with_prefetch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/63_hopper_gemm_with_weight_prefetch/pipeline/prefetch_pipeline_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/64_ada_fp8_gemm_grouped/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/64_ada_fp8_gemm_grouped/ada_fp8_gemm_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/65_distributed_gemm/65_distributed_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/65_distributed_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/65_distributed_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/65_distributed_gemm/REQUIREMENTS.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling/67_hopper_fp8_warp_specialized_gemm_with_groupwise_scaling.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/67_hopper_fp8_warp_specialized_gemm_with_blockwise_scaling/hopper_fp8_commandline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling_with_sparse_groups.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/68_hopper_fp8_warp_specialized_grouped_gemm_with_blockwise_scaling/hopper_fp8_commandline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/69_hopper_int4_bf16_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/69_hopper_int4_fp8_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/69_hopper_mixed_dtype_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/69_hopper_mixed_dtype_grouped_gemm/grouped_mixed_dtype_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/70_blackwell_gemm/70_blackwell_fp16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/70_blackwell_gemm/70_blackwell_fp8_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/70_blackwell_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/71_blackwell_gemm_with_collective_builder/71_blackwell_gemm_with_collective_builder.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/71_blackwell_gemm_with_collective_builder/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/72_blackwell_narrow_precision_gemm/72a_blackwell_nvfp4_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/72_blackwell_narrow_precision_gemm/72b_blackwell_nvfp4_nvfp4_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/72_blackwell_narrow_precision_gemm/72c_blackwell_mixed_mxfp8_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/72_blackwell_narrow_precision_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/73_blackwell_gemm_preferred_cluster/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/73_blackwell_gemm_preferred_cluster/blackwell_gemm_preferred_cluster.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/74_blackwell_gemm_streamk/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/74_blackwell_gemm_streamk/blackwell_gemm_streamk.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/75_blackwell_grouped_gemm/75_blackwell_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/75_blackwell_grouped_gemm/75_blackwell_grouped_gemm_block_scaled.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/75_blackwell_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/76_blackwell_conv/76_blackwell_conv_dgrad.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/76_blackwell_conv/76_blackwell_conv_fprop.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/76_blackwell_conv/76_blackwell_conv_wgrad.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/76_blackwell_conv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/77_blackwell_fmha.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/77_blackwell_fmha_bwd.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/77_blackwell_fmha_gen.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/77_blackwell_mla.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/77_blackwell_mla_fwd.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/fmha_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/fmha_fusion.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_fwd_epilogue_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_fwd_mainloop_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_gen_epilogue_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_gen_mainloop_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_load_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_load_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_mla_fwd_mainloop_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/collective/sm100_fmha_mla_load_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/common/pipeline_mla.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/common/pow_2.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/device/fmha.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/device/fmha_device_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/device/sm100_mla.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/fmha_causal_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/fmha_kernel_bwd_convert.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/fmha_kernel_bwd_sum_OdO.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/fmha_options.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/fmha_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_bwd_kernel_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_bwd_mla_kernel_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_fwd_kernel_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_gen_kernel_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_mla_reduction.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/sm100_fmha_mla_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/kernel/sm100_mla_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/reference/fmha_bwd_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/reference/fmha_fwd_gen_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/reference/fmha_fwd_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/reference/fmha_mla_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/77_blackwell_fmha/reference/reference_abs_error.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/78_blackwell_emulated_bf16x9_gemm/78_blackwell_emulated_bf16x9_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/78_blackwell_emulated_bf16x9_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/79_blackwell_geforce_gemm/79a_blackwell_geforce_nvfp4_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/79_blackwell_geforce_gemm/79b_blackwell_geforce_nvfp4_nvfp4_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/79_blackwell_geforce_gemm/79c_blackwell_geforce_mixed_mxfp8_mxfp6_bf16_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/79_blackwell_geforce_gemm/79d_blackwell_geforce_nvfp4_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/79_blackwell_geforce_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/80_blackwell_geforce_sparse_gemm/80a_blackwell_geforce_mxfp8_bf16_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/80_blackwell_geforce_sparse_gemm/80b_blackwell_geforce_nvfp4_nvfp4_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/80_blackwell_geforce_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/81_blackwell_gemm_blockwise/81_blackwell_gemm_blockwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/81_blackwell_gemm_blockwise/81_blackwell_gemm_groupwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/81_blackwell_gemm_blockwise/81_blackwell_grouped_gemm_blockwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/81_blackwell_gemm_blockwise/81_blackwell_grouped_gemm_groupwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/81_blackwell_gemm_blockwise/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/81_blackwell_gemm_blockwise/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/82_blackwell_distributed_gemm/82_blackwell_distributed_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/82_blackwell_distributed_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/82_blackwell_distributed_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/82_blackwell_distributed_gemm/REQUIREMENTS.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/83_blackwell_sparse_gemm/83_blackwell_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/83_blackwell_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/84_blackwell_narrow_precision_sparse_gemm/84a_blackwell_nvfp4_bf16_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/84_blackwell_narrow_precision_sparse_gemm/84b_blackwell_mixed_mxfp8_bf16_sparse_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/84_blackwell_narrow_precision_sparse_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/86_blackwell_mixed_dtype_gemm/86_blackwell_mixed_dtype.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/86_blackwell_mixed_dtype_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/86_blackwell_mixed_dtype_gemm/mixed_dtype_helper.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/87_blackwell_geforce_gemm_blockwise/87a_blackwell_geforce_fp8_bf16_gemm_blockwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/87_blackwell_geforce_gemm_blockwise/87b_blackwell_geforce_fp8_bf16_gemm_groupwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/87_blackwell_geforce_gemm_blockwise/87c_blackwell_geforce_fp8_bf16_grouped_gemm_groupwise.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/87_blackwell_geforce_gemm_blockwise/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/87_blackwell_geforce_gemm_blockwise/utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/88_hopper_fmha.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/collective/fmha_collective_bwd_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/collective/fmha_collective_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/collective/fmha_collective_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/collective/fmha_collective_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/collective/fmha_collective_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/collective/fmha_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/collective/fmha_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/collective/fmha_epilogue_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/collective/fmha_fusion.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/device/device_universal.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/device/fmha_device_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_bwd_convert.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_bwd_sum_OdO.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/kernel/fmha_kernel_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/kernel/fmha_options.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/kernel/fmha_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/reference/fmha_bwd_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/reference/fmha_reference.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/88_hopper_fmha/reference/reference_abs_error.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/89_sm103_fp4_ultra_gemm/89_sm103_fp4_ultra_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/89_sm103_fp4_ultra_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/90_sm103_fp4_ultra_grouped_gemm/90_sm103_fp4_ultra_grouped_gemm.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/90_sm103_fp4_ultra_grouped_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/91_fp4_gemv/91_fp4_gemv.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/91_fp4_gemv/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_fp4_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_fp4_regular.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_grouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_rcgrouped.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/92_blackwell_moe_gemm/92_blackwell_moe_gemm_regular.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/92_blackwell_moe_gemm/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/common/dist_gemm_helpers.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/common/gather_tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/common/helper.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/blackwell/01_mma_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/blackwell/02_mma_tma_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/blackwell/03_mma_tma_multicast_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/blackwell/04_mma_tma_2sm_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/blackwell/05_mma_tma_epi_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/blackwell/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/blackwell/example_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/hopper/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/hopper/wgmma_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/hopper/wgmma_tma_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/sgemm_1.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/sgemm_2.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/sgemm_sm70.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/sgemm_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/tiled_copy.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/cute/tutorial/tiled_copy_if.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/call_bypass_dlpack.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/call_from_jit.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/dynamic_smem_size.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/elementwise_add.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/elementwise_apply.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/flash_attention_v2.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/hstu_attention.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/inline_ptx.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/sgemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/smem_allocator.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/ampere/tensorop_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/blockwise_gemm/blockwise_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/blockwise_gemm/contiguous_grouped_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/blockwise_gemm/masked_grouped_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/dense_blockscaled_gemm_persistent.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/dense_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/dense_gemm_alpha_beta_persistent.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/dense_gemm_persistent.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/dense_gemm_software_pipeline.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/fmha.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/fmha_bwd.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/grouped_blockscaled_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/grouped_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/mamba2_ssd/mamba2_ssd.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/mamba2_ssd/mamba2_ssd_reference.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/mamba2_ssd/mamba2_ssd_tile_scheduler.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/mixed_input_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/mla.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/programmatic_dependent_launch.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/tutorial_gemm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell/tutorial_gemm/fp16_gemm_0.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/blackwell_geforce/dense_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/cute/ffi/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/cute/ffi/jit_argument.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/cute/ffi/tensor.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/cute/torch_fake_tensor.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/hopper/dense_gemm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/hopper/dense_gemm_persistent.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/hopper/fmha.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/async_pipeline.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/benchmark_autotune.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/composed_layout.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/cuda_graphs.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/cute_layout_algebra.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/data_types.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/elementwise_add.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/hello_world.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/images/cuda_graphs_image.png +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/print.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/tensor.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/notebooks/tensorssa.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/utils/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/utils/fmha_helpers.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/utils/sparse_utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/CuTeDSL/utils/test_sparse_utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/deprecated/00_basic_gemm.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/deprecated/01_epilogue.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/deprecated/02_pytorch_extension_grouped_gemm.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/deprecated/03_basic_conv2d.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/deprecated/04_epilogue_visitor.ipynb +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/examples/python/deprecated/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/axpby.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/clear.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/cooperative_copy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/cooperative_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/copy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/fill.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/functional.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/prefer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/prefetch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/tensor_algorithms.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/tensor_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/algorithm/tuple_algorithms.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/cluster_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/cluster_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/config.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/copy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/copy_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/copy_sm100_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/copy_sm50.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/copy_sm75.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/copy_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/copy_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/copy_sm90_desc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/copy_sm90_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm100_desc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm100_umma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm120.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm120_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm61.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm70.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm75.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm89.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm90_desc.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm90_gmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm90_gmma_ext.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm90_gmma_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/mma_sm90_gmma_sparse_ext.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/simd_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/tmem_allocator_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/arch/util.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_atom.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm100_im2col.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm100_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm50.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm75.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm90_im2col.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm90_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/copy_traits_sm90_tma_swizzle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_atom.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm100.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm120.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm120_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm61.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm70.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm75.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm89.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm90.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm90_gmma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm90_gmma_ext.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm90_gmma_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/mma_traits_sm90_gmma_sparse_ext.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/atom/partitioner.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/config.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/container/alignment.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/container/array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/container/array_aligned.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/container/array_subbyte.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/container/bit_field.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/container/cuda_types.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/container/tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/container/type_list.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/int_tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/layout_composed.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/numeric/arithmetic_tuple.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/numeric/complex.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/numeric/int.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/numeric/integer_sequence.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/numeric/integral_constant.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/numeric/integral_ratio.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/numeric/math.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/numeric/numeric_types.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/numeric/real.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/pointer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/pointer_base.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/pointer_flagged.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/pointer_sparse.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/pointer_swizzle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/stride.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/swizzle.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/swizzle_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/tensor_impl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/tensor_zip.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/underscore.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/util/debug.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/util/print.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/util/print_latex.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/util/print_svg.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/util/print_tensor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cute/util/type_traits.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/aligned_buffer.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/arch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/barrier.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/cache_operation.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/config.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/grid_dependency_control.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/memory.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/memory_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/memory_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sm100.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sm50.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sm60.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sm61.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sm89.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sm90.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sparse_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/mma_sparse_sm89.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/reg_reconfig.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/simd.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/simd_sm60.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/simd_sm61.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/synclog.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/wmma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/wmma_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/wmma_sm72.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/arch/wmma_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/array_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/array_subbyte.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/barrier.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/bfloat16.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/blas3.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/blas3_types.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/block_striped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/cluster_launch.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/constants.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/collective/builders/sm100_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/collective/builders/sm100_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/collective/builders/sm90_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/collective/builders/sm90_gmma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/collective/collective_builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/collective/collective_conv.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/collective/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/collective/sm100_implicit_gemm_umma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/collective/sm90_implicit_gemm_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/conv2d_problem_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/conv3d_problem_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/convnd_problem_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/device/conv_universal_adapter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/device/direct_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/device/implicit_gemm_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/device/implicit_gemm_convolution_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/dispatch_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/conv_universal.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d_dgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d_fprop_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d_group_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d_wgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv2d_wgrad_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv3d_dgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv3d_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv3d_fprop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv3d_fprop_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_conv3d_wgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_deconv2d.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_deconv2d_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_deconv3d.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_deconv3d_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/default_depthwise_fprop.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/direct_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution_strided_dgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/implicit_gemm_convolution_with_fused_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/sm100_implicit_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/kernel/sm90_implicit_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/thread/depthwise_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_dgrad_filter_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_dgrad_filter_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_dgrad_output_gradient_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_dgrad_output_gradient_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_activation_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_activation_tile_access_iterator_few_channels.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_activation_tile_access_iterator_fixed_channels.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_activation_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_filter_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_filter_tile_access_iterator_few_channels.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_filter_tile_access_iterator_fixed_channels.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_fprop_filter_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_wgrad_activation_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_wgrad_activation_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_wgrad_output_gradient_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv2d_wgrad_output_gradient_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_dgrad_filter_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_dgrad_filter_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_dgrad_output_gradient_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_dgrad_output_gradient_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_fprop_activation_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_fprop_activation_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_fprop_filter_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_fprop_filter_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_wgrad_activation_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_wgrad_activation_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_wgrad_output_gradient_tile_access_iterator_analytic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/conv3d_wgrad_output_gradient_tile_access_iterator_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/depthwise_direct_conv_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_activation_tile_access_iterator_direct_conv_fixed_stride_dilation.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_activation_tile_access_iterator_direct_conv_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_direct_conv_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_filter_tile_access_iterator_direct_conv_optimized.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/depthwise_fprop_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/depthwise_mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/depthwise_mma_core_with_lane_access_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/implicit_gemm_fprop_fusion_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/implicit_gemm_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/implicit_gemm_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/implicit_gemm_wgrad_fusion_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/predicated_scale_bias_vector_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/predicated_scale_bias_vector_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/threadblock/threadblock_swizzle.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/warp/mma_depthwise_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/warp/mma_depthwise_simt_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/conv/warp/scale_bias_relu_transform.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/core_io.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/cuda_host_adapter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/cutlass.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/blockwise_scale_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/cluster.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/collective/mixed_input_utils.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/collective/sm103_kernel_type.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/collective.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/dependent_false.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/helper_macros.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/mainloop_fusion_helper_scale_factor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/mma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/sm100_blockscaled_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/sm100_mixed_dtype_blockwise_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/sm100_tmem_helper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/detail/sm103_blockscaled_layout.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/device_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/builders/sm100_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/builders/sm103_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/builders/sm120_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/builders/sm120_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/builders/sm90_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/builders/sm90_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/collective_builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/collective_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/default_epilogue.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/default_epilogue_array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/epilogue_tensor_broadcast.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/sm100_epilogue_array_nosmem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/sm100_epilogue_array_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/sm100_epilogue_nosmem.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/sm100_epilogue_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/sm70_epilogue_vectorized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/sm70_epilogue_vectorized_array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/sm90_epilogue_array_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/sm90_epilogue_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/collective/sm90_epilogue_tma_warpspecialized_bias_elementwise.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/dispatch_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/callbacks.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/operations.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm100_callbacks_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm100_visitor_compute_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm100_visitor_store_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm120_callbacks_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm120_visitor_store_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm90_callbacks_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_compute_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_load_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_store_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/fusion/sm90_visitor_topk_softmax.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/activation.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/conversion_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_bias_elementwise.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_bias_relu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_clamp.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_dgelu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_drelu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_gelu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_generic.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_generic_with_scaling.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_hardswish.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_leaky_relu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_relu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_relu0.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_residual_block.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_sigmoid.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_silu.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_tensor_broadcast.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/linear_combination_with_elementwise.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/reduction_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/thread/scale_type.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_complex_tensor_op_blas3.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_direct_store.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_tensor_op_blas3.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_volta_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_epilogue_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_thread_map_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_thread_map_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_thread_map_volta_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/default_thread_map_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/direct_store_epilogue_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_base_streamk.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_depthwise.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_direct_store.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_gemm_k_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_smem_accumulator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_streamk_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_visitor_with_softmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_scaling_factor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_with_visitor_callbacks.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/epilogue_workspace.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/fusion/visitor_2x.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/fusion/visitor_compute.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/fusion/visitor_load.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/fusion/visitor_store.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/fusion/visitors.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/interleaved_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/output_iterator_parameter.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/output_tile_thread_map.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_affine.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_affine_layout_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_blas3.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_conv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_direct_conv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_predicates.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/predicated_tile_iterator_strided_dgrad.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/shared_load_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/shared_load_iterator_mixed.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/threadblock/shared_load_iterator_pitch_linear.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/fragment_iterator_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/fragment_iterator_gaussian_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/fragment_iterator_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/fragment_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/fragment_iterator_volta_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/fragment_iterator_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/simt_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/tensor_op_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/tile_iterator_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/tile_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/tile_iterator_tensor_op_mixed.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/tile_iterator_volta_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/tile_iterator_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/volta_tensor_op_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/epilogue/warp/wmma_tensor_op_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/exmy_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/experimental/distributed/device/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/experimental/distributed/device/dist_gemm_universal_wrapper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/experimental/distributed/device/full_barrier.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/experimental/distributed/kernel/detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/experimental/distributed/kernel/dist_gemm_kernel_wrapper.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/experimental/distributed/kernel/full_barrier.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/experimental/distributed/schedules/dist_gemm_1d_schedules.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/experimental/distributed/schedules/dist_gemm_base_schedule.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/fast_math.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/float8.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/float_subbyte.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/floating_point_nvrtc.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/functional.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_9xBF16_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_blockscaled_mixed_tma_cpasync_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_blockscaled_sparse_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_blockscaled_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_blockwise_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_cpasync_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_mixed_input_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_mixed_tma_cpasync_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_pipeline_carveout.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_simt_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_sparse_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm100_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm103_blockscaled_umma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm120_blockscaled_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm120_blockscaled_sparse_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm120_blockwise_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm120_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm120_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm120_sparse_mma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm1xx_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm1xx_sparse_config.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm90_common.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm90_gmma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm90_sparse_config.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/builders/sm90_sparse_gmma_builder.inl +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/collective_builder.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/collective_builder_decl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/collective_mma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/collective_mma_decl.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/fp8_accumulation.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_blockscaled_mma_array_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_blockscaled_mma_mixed_tma_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_blockscaled_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_blockscaled_sparse_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_array_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_array_warpspecialized_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_array_warpspecialized_emulated.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_array_warpspecialized_rcggemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_mixed_tma_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_warpspecialized_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_warpspecialized_emulated.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_mma_warpspecialized_mixed_input.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm100_sparse_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm103_blockscaled_mma_array_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm103_blockscaled_mma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm120_blockscaled_mma_array_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm120_blockscaled_mma_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm120_blockscaled_sparse_mma_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm120_mma_array_tma_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm120_mma_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm120_mma_tma_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm120_sparse_mma_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm70_mma_twostage.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm80_mma_array_multistage.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm80_mma_multistage.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_array_tma_gmma_rs_warpspecialized_mixed_input.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_array_tma_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_array_tma_gmma_ss_warpspecialized_fp8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_array_tma_gmma_ss_warpspecialized_fp8_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_multistage_gmma_rs_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_multistage_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_rs_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_rs_warpspecialized_mixed_input.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_ss.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_ss_warpspecialized_fp8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_mma_tma_gmma_ss_warpspecialized_fp8_blockwise_scaling.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_sparse_mma_tma_gmma_ss_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/collective/sm90_sparse_mma_tma_gmma_ss_warpspecialized_fp8.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/base_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/default_gemm_configuration.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/ell_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_batched.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_layernorm_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_sparse.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_sparse_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_sparse_universal_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_sparse_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_sparse_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_splitk_parallel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_universal_adapter.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_universal_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_universal_streamk_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_universal_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_universal_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemm_with_k_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/gemv_blockscaled.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/rank_2k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/rank_2k_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/rank_k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/symm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/device/trmm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/dispatch_policy.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/gemm_enumerated_types.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/group_array_problem_shape.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_ell_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_grouped_per_group_scale.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_grouped_softmax_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_layernorm_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_planar_complex_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse_universal_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_sparse_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_splitk_parallel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_streamk_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_universal_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_with_broadcast.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_with_k_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemm_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_gemv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_rank_2k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_rank_2k_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_rank_2k_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_rank_2k_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_rank_k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_rank_k_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_rank_k_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_symm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_symm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_symm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_trmm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_trmm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/default_trmm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/ell_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_batched.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_grouped_per_group_scale.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_grouped_softmax_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_layernorm_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_planar_complex_array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_sparse_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_sparse_universal_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_splitk_parallel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_streamk_with_fused_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_transpose_operands.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_universal.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_universal_decl.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_universal_streamk.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_universal_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_universal_with_visitor_streamk.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_with_fused_epilogue.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemm_with_k_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemv_batched_strided.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/gemv_blockscaled.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/params_sparse_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/params_universal_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/rank_2k_grouped.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/rank_2k_grouped_problem_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/rank_2k_transpose_operands.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/rank_2k_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/rank_k_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_gemm_array_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_gemm_array_tma_warpspecialized_input_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_gemm_array_tma_warpspecialized_mma_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_gemm_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_gemm_mixed_tma_cpasync_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_gemm_tma_warpspecialized_input_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_gemm_tma_warpspecialized_mixed_input_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_gemm_tma_warpspecialized_mma_transform.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_sparse_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_static_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_tile_scheduler_group.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm100_tile_scheduler_stream_k.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm103_blockscaled_gemm_array_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm103_blockscaled_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm120_gemm_tma_warpspecialized_cooperative_asymmetric_dma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm70_gemm.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm70_gemm_array.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_gemm_array_tma_warpspecialized_cooperative.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_gemm_array_tma_warpspecialized_pingpong.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_gemm_tma.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_gemm_tma_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_gemm_tma_warpspecialized_cooperative.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_gemm_tma_warpspecialized_pingpong.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_gemm_warpspecialized.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_gemm_warpspecialized_cooperative.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_gemm_warpspecialized_pingpong.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_tile_scheduler_group.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sm90_tile_scheduler_stream_k.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sparse_gemm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sparse_gemm_with_absmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/sparse_gemm_with_visitor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/static_tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/symm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/tile_scheduler_detail.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/tile_scheduler_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/kernel/trmm_universal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/thread/mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/thread/mma_sm50.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/thread/mma_sm60.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/thread/mma_sm61.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_ell_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_gemv_core.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_core.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_core_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_core_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_core_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_core_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_core_sparse_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_core_with_access_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_core_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_core_wmma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_layernorm_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_planar_complex_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_planar_complex_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_softmax_mainloop_fusion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_mma_with_reduction.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_multistage_mma_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_multistage_mma_complex_core.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_multistage_mma_complex_core_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_multistage_trmm_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_sparse_mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/default_trmm.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/ell_mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/ell_mma_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/gemv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/index_remat.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_blas3_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_layernorm_mainloop_fusion_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_planar_complex_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_planar_complex_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_planar_complex_pipelined.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_singlestage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_softmax_mainloop_fusion_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_sparse_base.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_sparse_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/mma_with_reduction_multistage.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/threadblock_swizzle.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/threadblock/threadblock_swizzle_streamk.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/default_mma_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/default_mma_sparse_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/default_mma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/default_mma_tensor_op_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/default_mma_with_reduction_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/default_mma_wmma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/layernorm_scale_bias_transform.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_complex_tensor_op_fast_f32.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_complex_tensor_op_tile_iterator_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_gaussian_complex_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_gaussian_complex_tensor_op_tile_iterator_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_mixed_input_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_simt.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_simt_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_simt_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_sparse_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_fast_f32.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_fragment_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_policy.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator_sparse.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_tile_iterator_wmma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_tensor_op_wmma.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/mma_with_reduction_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/scale_bias_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/softmax_scale_bias_transform.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm/warp/tile_iterator_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm_coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/gemm_coord.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/half.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/integer_subbyte.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/kernel_hardware_info.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/kernel_hardware_info.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/kernel_launch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/layout/layout.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/layout/matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/layout/permute.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/layout/pitch_linear.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/layout/tensor.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/layout/tensor_op_multiplicand_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/layout/tensor_op_multiplicand_sm75.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/layout/tensor_op_multiplicand_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/layout/vector.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/matrix_coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/matrix_shape.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/numeric_conversion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/numeric_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/numeric_types.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/pipeline/pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/pipeline/sm100_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/pipeline/sm90_pipeline.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/pitch_linear_coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/platform/platform.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/predicate_vector.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/quaternion.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/real.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/device/reduce_split_k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/device/tensor_reduce.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/device/tensor_reduce_affine_contiguous.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/device/tensor_reduce_affine_strided.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/kernel/reduce_softmax_final.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/kernel/reduce_split_k.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/kernel/tensor_reduce_affine_contiguous.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/kernel/tensor_reduce_affine_strided.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/thread/reduce.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/thread/reduction_operators.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/reduction/threadblock_swizzle.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/relatively_equal.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/semaphore.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/subbyte_reference.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/tensor_coord.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/tensor_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/tensor_ref_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/tensor_view.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/tensor_view_planar_complex.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/tfloat32.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/thread/matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/trace.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/collective/sm90_wgmma_transpose.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/device/transform_universal_adapter.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/kernel/filter_format_transformer.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/kernel/sm90_sparse_gemm_compressor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/kernel/sparse_gemm_compressor.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/pitch_linear_thread_map.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/thread/transpose.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/thread/unary_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/ell_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/ell_predicated_tile_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/ell_predicated_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_scale_bias_vector_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_scale_bias_vector_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_tile_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_tile_access_iterator_2dthreadtile.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_tile_access_iterator_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_tile_access_iterator_triangular_matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_tile_iterator_2dthreadtile.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_tile_iterator_triangular_matrix.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/predicated_vector_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_scale_bias_vector_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator_pitch_linear.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator_pitch_linear_direct_conv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_access_iterator_tensor_op_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator_pitch_linear.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator_pitch_linear_2dthreadtile.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator_tensor_op.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/regular_tile_iterator_tensor_op_sm70.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/threadblock/vector_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/transform/warp/vector_fragment_iterator.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/uint128.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/uint256.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/version.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/wmma_array.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/include/cutlass/workspace.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/pyproject.toml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/cutlass/setup.cfg +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/flash_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/alibi.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/block_info.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/dropout.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim128_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim128_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim128_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim128_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim192_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim192_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim192_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim192_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim256_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim256_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim256_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim256_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim32_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim32_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim32_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim32_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim64_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim64_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim64_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim64_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim96_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim96_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim96_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_hdim96_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_launch_template.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_bwd_preprocess_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim128_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim128_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim128_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim128_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim192_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim192_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim192_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim192_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim256_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim256_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim256_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim256_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim32_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim32_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim32_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim32_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim64_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim64_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim64_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim64_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim96_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim96_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim96_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_hdim96_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_launch_template.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim128_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim128_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim128_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim128_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim192_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim192_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim192_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim192_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim256_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim256_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim256_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim256_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim32_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim32_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim32_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim32_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim64_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim64_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim64_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim64_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim96_bf16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim96_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim96_fp16_causal_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/flash_fwd_split_hdim96_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/generate_kernels.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/hardware_info.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/kernel_traits.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/mask.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/namespace_config.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/philox.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/philox_unpack.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/rotary.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/softmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/static_switch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn/src/utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn_ck/flash_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn_ck/flash_common.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn_ck/flash_common.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn_ck/mha_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn_ck/mha_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn_ck/mha_fwd_kvcache.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn_ck/mha_varlen_bwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/flash_attn_ck/mha_varlen_fwd.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/fused_dense_lib/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/fused_dense_lib/fused_dense.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/fused_dense_lib/fused_dense_cuda.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/fused_dense_lib/setup.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_1024.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_1280.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_1536.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_2048.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_256.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_2560.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_3072.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_4096.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_512.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_5120.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_6144.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_7168.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_768.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_8192.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_bwd_kernels.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_1024.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_1280.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_1536.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_2048.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_256.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_2560.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_3072.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_4096.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_512.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_5120.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_6144.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_7168.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_768.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_8192.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_fwd_kernels.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_kernel_traits.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_1024.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_1280.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_1536.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_2048.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_256.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_2560.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_3072.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_4096.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_512.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_5120.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_6144.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_7168.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_768.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_bwd_8192.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_1024.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_1280.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_1536.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_2048.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_256.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_2560.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_3072.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_4096.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_512.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_5120.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_6144.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_7168.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_768.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_fwd_8192.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_residual_bwd_kernels.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_parallel_residual_fwd_kernels.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/ln_utils.cuh +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/setup.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/csrc/layer_norm/static_switch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/bert_padding.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/.flake8 +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/ampere_helpers.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/barrier.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/benchmark.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/benchmark_mask_mod.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/blackwell_helpers.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/block_info.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/block_sparsity.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/copy_utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/cute_dsl_utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/fast_math.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/flash_bwd.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/flash_bwd_postprocess.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/flash_bwd_preprocess.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/flash_bwd_sm100.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/flash_bwd_sm90.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/flash_fwd.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/flash_fwd_combine.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/flash_fwd_sm100.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/hopper_helpers.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/interface.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/mask.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/mask_definitions.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/mma_sm100_desc.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/named_barrier.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/pack_gqa.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/pipeline.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/pyproject.toml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/seqlen_info.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/softmax.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/testing.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/tile_scheduler.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/cute/utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_interface.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/Dockerfile +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/README.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/bench.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/bwd_prefill.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/bwd_prefill_fused.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/bwd_prefill_onekernel.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/bwd_prefill_split.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/bwd_ref.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/fp8.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/fwd_decode.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/fwd_prefill.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/fwd_ref.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/interface_fa.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/test.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/train.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_amd/utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_attn_triton_og.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_blocksparse_attention.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/flash_blocksparse_attn_interface.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/layers/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/layers/patch_embed.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/layers/rotary.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/losses/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/losses/cross_entropy.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/baichuan.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/bert.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/bigcode.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/btlm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/falcon.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/gpt.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/gpt_neox.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/gptj.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/llama.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/opt.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/models/vit.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/modules/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/modules/block.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/modules/embedding.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/modules/mha.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/modules/mlp.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/activations.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/fused_dense.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/layer_norm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/rms_norm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/triton/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/triton/cross_entropy.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/triton/k_activations.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/triton/layer_norm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/triton/linear.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/triton/mlp.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/ops/triton/rotary.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/pyproject.toml +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/utils/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/utils/benchmark.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/utils/distributed.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/utils/generation.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/utils/library.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/utils/pretrained.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/utils/testing.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/flash_attn/utils/torch.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/benchmark_attn.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/benchmark_flash_attention_fp8.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/benchmark_mla_decode.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/benchmark_split_kv.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/block.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/copy_sm90_bulk_reduce.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/cuda_check.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/epilogue_bwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/epilogue_fwd.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_api.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_api_stable.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_attn_interface.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_bwd_kernel_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_bwd_kernel_sm90.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_bwd_launch_template.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_bwd_postprocess_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_bwd_preprocess_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_fwd_combine.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_fwd_combine_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_fwd_combine_launch_template.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_fwd_kernel_sm80.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_fwd_kernel_sm90.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_fwd_launch_template.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/flash_prepare_scheduler.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/generate_kernels.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/heuristics.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_bf16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim128_fp16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_bf16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim192_fp16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_bf16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim256_fp16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_bf16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim64_fp16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_bf16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_bwd_hdim96_fp16_softcapall_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_sm100.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_bf16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_e4m3_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim128_fp16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_e4m3_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_128_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_bf16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_e4m3_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim192_fp16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_bf16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_e4m3_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim256_fp16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_256_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_512_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_bf16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_e4m3_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim64_fp16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_bf16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_e4m3_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_paged_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_split_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_split_softcap_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdim96_fp16_split_softcapall_sm80.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_e4m3_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimall_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_bf16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_e4m3_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_paged_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_paged_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_paged_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_paged_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_softcap_packgqa_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_split_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/instantiations/flash_fwd_hdimdiff_fp16_split_softcap_sm90.cu +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/mainloop_bwd_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/mainloop_bwd_sm90_tma_gmma_ws.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/mainloop_fwd_sm80.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/mainloop_fwd_sm90_tma_gmma_ws.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/mask.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/named_barrier.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/pack_gqa.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/padding.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/paged_kv.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/rotary.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/seqlen.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/setup.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/sm90_pipeline_no_cluster.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/softmax.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/static_switch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/test_attn_kvcache.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/test_flash_attn.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/test_flash_attn_bwd_determinism.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/test_kvcache.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/test_util.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/tile_scheduler.hpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/tile_size.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/third_party/flash-attention/hopper/utils.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/_cpp_lib.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/_deprecation_warning.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/attn_bias_utils.py +0 -0
- {xformers-0.0.34.dev1102/xformers/benchmarks/LRA → xformers-0.0.34.dev1116/xformers/benchmarks}/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/benchmarks/benchmark_attn_decoding.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/benchmarks/benchmark_indexing.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/benchmarks/benchmark_mem_eff_attention.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/benchmarks/benchmark_merge_attentions.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/benchmarks/benchmark_sequence_parallel_fused.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/benchmarks/benchmark_sp24.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/benchmarks/benchmark_tiled_matmul.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/benchmarks/utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/checkpoint.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_decoder/CMakeLists.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_decoder/attention_forward_splitk.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_decoder/ck_tile_attention_forward_decoder_splitk.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_decoder/ck_tile_attention_inner_product.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/GENERATE_INSTANCES.md +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/attention_backward_generic_ck_tiled.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/attention_ck_rand_uniform.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/attention_forward_generic_ck_tiled.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_fmha_test.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_fmha_util.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_bool_switch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_backward.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_backward_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_backward_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_forward.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_forward_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_forward_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_forward_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_forward_splitkv_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_forward_splitkv_smallq_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_infer.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_infer_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_infer_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_infer_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_infer_splitkv_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_batched_infer_splitkv_smallq_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_bwd_setting.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_fwd_setting.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_fwd_splitkv_selector.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_fwd_splitkv_setting.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_fwd_splitkv_smallq_selector.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_fwd_splitkv_smallq_setting.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_fwd_type_config.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_backward.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_backward_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_backward_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_forward.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_forward_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_forward_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_forward_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_forward_splitkv_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_forward_splitkv_smallq_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_infer.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_infer_bf16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_infer_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_infer_fp16.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_infer_splitkv_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_grouped_infer_splitkv_smallq_dispatch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_num_kv_split_switch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_params.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_fmha_seqlen_q_switch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_headdim_switch.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/ck_tiled_rand_uniform_kernel.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/generate_instances.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_has_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_bf16_no_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_has_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_forward_fp16_no_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_has_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_bf16_no_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_has_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_batched_infer_fp16_no_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_bf16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_has_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_has_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_has_mask_no_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_has_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_has_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_backward_fp16_no_mask_no_bias_no_biasgrad_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_has_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_bf16_no_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_has_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_forward_fp16_no_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_has_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_bf16_no_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_has_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_instances_ref.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_has_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_has_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_has_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_has_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_has_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_has_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_has_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_no_dropout_maxk_128.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_no_dropout_maxk_256.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_no_dropout_maxk_32.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_no_dropout_maxk_512.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_no_dropout_maxk_64.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/attention/hip_fmha/instances/fmha_grouped_infer_fp16_no_mask_no_bias_no_dropout_maxk_96.cpp +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/sparse24/compute_sparse_tile.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/sparse24/sparse24_pack.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/sparse24/static_sort.h +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/csrc/sparse24/warp_tensor.h +0 -0
- {xformers-0.0.34.dev1102/xformers/benchmarks/LRA/code → xformers-0.0.34.dev1116/xformers/flash_attn_3}/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/fwbw_overlap.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/info.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/_triton/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/_triton/k_index_select_cat.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/_triton/k_scaled_index_add.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/_triton/matmul_perf_model.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/_triton/rmsnorm_kernels.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/_triton/rope_padded_kernels.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/_triton/tiled_matmul_kernels.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/common.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/differentiable_collectives.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/__init__.py +0 -0
- {xformers-0.0.34.dev1102/xformers/benchmarks → xformers-0.0.34.dev1116/xformers/ops/fmha/_triton}/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/attn_bias.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/ck.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/ck_splitk.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/common.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/cutlass.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/cutlass_blackwell.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/dispatch.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/merge_training.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/torch_attention_compat.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/fmha/triton_splitk.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/indexing.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/modpar_layers.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/rmsnorm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/rope_padded.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/seqpar.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/sequence_parallel_fused_ops.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/sp24.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/swiglu_op.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/tiled_matmul.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/tree_attention.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/ops/unbind.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/profiler/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/profiler/api.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/profiler/device_limits.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/profiler/find_slowest.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/profiler/profile_analyzer.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/profiler/profiler.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/profiler/profiler_dcgm.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/profiler/profiler_dcgm_impl.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/sparse/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/sparse/blocksparse_tensor.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/sparse/utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/test.py +0 -0
- {xformers-0.0.34.dev1102/xformers/flash_attn_3 → xformers-0.0.34.dev1116/xformers/triton}/__init__.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/triton/importing.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/triton/vararg_kernel.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers/utils.py +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers.egg-info/dependency_links.txt +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers.egg-info/not-zip-safe +0 -0
- {xformers-0.0.34.dev1102 → xformers-0.0.34.dev1116}/xformers.egg-info/top_level.txt +0 -0
@@ -0,0 +1,17 @@
+include LICENSE
+include requirements.txt
+include version.txt
+
+recursive-include xformers/csrc *
+recursive-include third_party/cutlass/include *
+recursive-include third_party/cutlass/tools/util/include *
+recursive-include third_party/cutlass/examples *
+recursive-include third_party/flash-attention/csrc *
+recursive-include third_party/flash-attention/flash_attn *
+recursive-include third_party/flash-attention/hopper *
+
+prune third_party/flash-attention/csrc/cutlass/docs/
+prune third_party/flash-attention/csrc/cutlass/test/
+prune third_party/flash-attention/csrc/cutlass/tools/
+prune third_party/flash-attention/csrc/cutlass/media/
+prune third_party/flash-attention/csrc/cutlass/python/
@@ -0,0 +1,31 @@
+Metadata-Version: 2.4
+Name: xformers
+Version: 0.0.34.dev1116
+Summary: XFormers: A collection of composable Transformer building blocks.
+Home-page: https://facebookresearch.github.io/xformers/
+Author: Facebook AI Research
+Author-email: oncall+xformers@xmail.facebook.com
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: torch>=2.10
+Requires-Dist: numpy
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: home-page
+Dynamic: license-file
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
+
+XFormers: A collection of composable Transformer building blocks.XFormers aims at being able to reproduce most architectures in the Transformer-family SOTA,defined as compatible and combined building blocks as opposed to monolithic models
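
(Illustration, not part of the diff: the PKG-INFO hunk above declares `Requires-Dist: torch>=2.10` and `Requires-Dist: numpy`. A minimal sketch of reading this metadata from an installed wheel with the standard library; the printed values are examples, not guaranteed output.)

```python
# Sketch: inspect the installed xformers distribution metadata
# shown in the PKG-INFO hunk above, using only the standard library.
from importlib.metadata import metadata, requires, version

print(version("xformers"))               # e.g. "0.0.34.dev1116"
print(requires("xformers"))              # e.g. ["torch>=2.10", "numpy"]
print(metadata("xformers")["Summary"])   # the Summary field above
```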
@@ -0,0 +1,135 @@
+<img src="./docs/assets/logo.png" width=800>
+
+[](https://colab.research.google.com/github/facebookresearch/xformers/blob/main/docs/source/xformers_mingpt.ipynb)
+<br/><!--
+
+
+[](https://github.com/facebookresearch/xformers/actions/workflows/gh-pages.yml/badge.svg)
+-->
+[](https://app.circleci.com/pipelines/github/facebookresearch/xformers/)
+[](https://codecov.io/gh/facebookresearch/xformers)
+[](https://github.com/psf/black)
+<br/>
+[](CONTRIBUTING.md)
+<!--
+[](https://pepy.tech/project/xformers)
+-->
+--------------------------------------------------------------------------------
+
+## xFormers - Toolbox to Accelerate Research on Transformers
+
+xFormers is:
+- **Customizable building blocks**: Independent/customizable building blocks that can be used without boilerplate code. The components are domain-agnostic and xFormers is used by researchers in vision, NLP and more.
+- **Research first**: xFormers contains bleeding-edge components, that are not yet available in mainstream libraries like PyTorch.
+- **Built with efficiency in mind**: Because speed of iteration matters, components are as fast and memory-efficient as possible. xFormers contains its own CUDA kernels, but dispatches to other libraries when relevant.
+
+## Installing xFormers
+
+* **(RECOMMENDED, linux & win) Install latest stable with pip**: Requires [PyTorch 2.10.0](https://pytorch.org/get-started/locally/)
+
+```bash
+# [linux & win] cuda 12.6 version
+pip3 install -U xformers --index-url https://download.pytorch.org/whl/cu126
+# [linux & win] cuda 12.8 version
+pip3 install -U xformers --index-url https://download.pytorch.org/whl/cu128
+# [linux & win] cuda 13.0 version
+pip3 install -U xformers --index-url https://download.pytorch.org/whl/cu130
+# [linux only] (EXPERIMENTAL) rocm 7.1 version
+pip3 install -U xformers --index-url https://download.pytorch.org/whl/rocm7.1
+```
+
+* **Development binaries**:
+
+```bash
+# Same requirements as for the stable version above
+pip install --pre -U xformers
+```
+
+* **Install from source**: If you want to use with another version of PyTorch for instance (including nightly-releases)
+
+```bash
+# (Optional) Makes the build much faster
+pip install ninja
+# Set TORCH_CUDA_ARCH_LIST if running and building on different GPU types
+# NOTE: pytorch must already be installed!
+pip install -v --no-build-isolation -U git+https://github.com/facebookresearch/xformers.git@main#egg=xformers
+# (this can take dozens of minutes)
+```
+
+
+## Benchmarks
+
+**Memory-efficient MHA**
+
+*Setup: A100 on f16, measured total time for a forward+backward pass*
+
+Note that this is exact attention, not an approximation, just by calling [`xformers.ops.memory_efficient_attention`](https://facebookresearch.github.io/xformers/components/ops.html#xformers.ops.memory_efficient_attention)
+
+**More benchmarks**
+
+xFormers provides many components, and more benchmarks are available in [BENCHMARKS.md](BENCHMARKS.md).
+
+### (Optional) Testing the installation
+
+This command will provide information on an xFormers installation, and what kernels are built/available:
+
+```python
+python -m xformers.info
+```
+
+## Using xFormers
+
+### Key Features
+
+1. Optimized building blocks, beyond PyTorch primitives
+   1. Memory-efficient exact attention - up to 10x faster
+   2. sparse attention
+   3. block-sparse attention
+   4. fused softmax
+   5. fused linear layer
+   6. fused layer norm
+   7. fused dropout(activation(x+bias))
+   8. fused SwiGLU
+
+### Install troubleshooting
+
+
+* NVCC and the current CUDA runtime match. Depending on your setup, you may be able to change the CUDA runtime with `module unload cuda; module load cuda/xx.x`, possibly also `nvcc`
+* the version of GCC that you're using matches the current NVCC capabilities
+* the `TORCH_CUDA_ARCH_LIST` env variable is set to the architectures that you want to support. A suggested setup (slow to build but comprehensive) is `export TORCH_CUDA_ARCH_LIST="6.0;6.1;6.2;7.0;7.2;7.5;8.0;8.6"`
+* If the build from source OOMs, it's possible to reduce the parallelism of ninja with `MAX_JOBS` (eg `MAX_JOBS=2`)
+* If getting error message `Filename longer than 260 characters` on Windows, make sure long paths are enabled at OS level, and also execute the command `git config --global core.longpaths true`
+
+
+### License
+
+xFormers has a BSD-style license, as found in the [LICENSE](LICENSE) file.
+It includes code from the [triton-lang/kernels](https://github.com/triton-lang/kernels) repo.
+
+## Citing xFormers
+
+If you use xFormers in your publication, please cite it by using the following BibTeX entry.
+
+``` bibtex
+@Misc{xFormers2022,
+  author = {Benjamin Lefaudeux and Francisco Massa and Diana Liskovich and Wenhan Xiong and Vittorio Caggiano and Sean Naren and Min Xu and Jieru Hu and Marta Tintore and Susan Zhang and Patrick Labatut and Daniel Haziza and Luca Wehrstedt and Jeremy Reizenstein and Grigory Sizov},
+  title = {xFormers: A modular and hackable Transformer modelling library},
+  howpublished = {\url{https://github.com/facebookresearch/xformers}},
+  year = {2022}
+}
+```
+
+## Credits
+
+The following repositories are used in xFormers, either in close to original form or as an inspiration:
+
+* [Sputnik](https://github.com/google-research/sputnik)
+* [GE-SpMM](https://github.com/hgyhungry/ge-spmm)
+* [Triton](https://github.com/openai/triton)
+* [LucidRain Reformer](https://github.com/lucidrains/reformer-pytorch)
+* [RevTorch](https://github.com/RobinBruegger/RevTorch)
+* [Nystromformer](https://github.com/mlpen/Nystromformer)
+* [FairScale](https://github.com/facebookresearch/fairscale/)
+* [Pytorch Image Models](https://github.com/rwightman/pytorch-image-models)
+* [CUTLASS](https://github.com/nvidia/cutlass)
+* [Flash-Attention](https://github.com/HazyResearch/flash-attention)
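
(Illustration, not part of the diff: the README hunk above notes that exact attention is available "just by calling `xformers.ops.memory_efficient_attention`". A minimal usage sketch; the tensor shapes, dtype, and device chosen here are illustrative.)

```python
# Sketch: memory-efficient exact attention as described in the README above.
# Assumes a CUDA build of xformers; tensors are [batch, seq_len, heads, head_dim].
import torch
import xformers.ops as xops

q = torch.randn(2, 1024, 8, 64, device="cuda", dtype=torch.float16)
k = torch.randn(2, 1024, 8, 64, device="cuda", dtype=torch.float16)
v = torch.randn(2, 1024, 8, 64, device="cuda", dtype=torch.float16)

# Exact attention, dispatched to the fastest kernel available on this build.
out = xops.memory_efficient_attention(q, k, v)  # same shape as q
```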
@@ -0,0 +1,11 @@
+[build-system]
+# XXX: If your project needs other packages to build properly, add them to this list.
+requires = ["setuptools >= 64", "torch >= 2.10"]
+build-backend = "setuptools.build_meta"
+
+[tool.black]
+target-version = ["py310"]
+line-length = 88
+
+[tool.usort]
+first_party_detection = false