cache_dit-1.0.9-py3-none-any.whl → cache_dit-1.0.10-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cache-dit has been flagged as potentially problematic by the registry.
- cache_dit/_version.py +2 -2
- cache_dit/cache_factory/__init__.py +1 -0
- cache_dit/cache_factory/block_adapters/__init__.py +37 -0
- cache_dit/cache_factory/block_adapters/block_adapters.py +51 -3
- cache_dit/cache_factory/block_adapters/block_registers.py +41 -14
- cache_dit/cache_factory/cache_adapters/cache_adapter.py +68 -30
- cache_dit/cache_factory/cache_contexts/cache_config.py +5 -3
- cache_dit/cache_factory/cache_contexts/cache_manager.py +125 -4
- cache_dit/cache_factory/cache_contexts/context_manager.py +9 -2
- cache_dit/cache_factory/cache_contexts/prune_manager.py +15 -2
- cache_dit/cache_factory/cache_interface.py +29 -3
- cache_dit/cache_factory/forward_pattern.py +14 -14
- cache_dit/parallelism/backends/native_diffusers/__init__.py +0 -3
- cache_dit/parallelism/backends/native_diffusers/context_parallelism/__init__.py +95 -0
- cache_dit/parallelism/backends/native_diffusers/context_parallelism/cp_plan_registers.py +74 -0
- cache_dit/parallelism/backends/native_diffusers/context_parallelism/cp_planners.py +254 -0
- cache_dit/parallelism/backends/native_diffusers/parallel_difffusers.py +17 -61
- cache_dit/parallelism/backends/native_diffusers/utils.py +11 -0
- cache_dit/parallelism/backends/native_pytorch/__init__.py +3 -0
- cache_dit/parallelism/backends/native_pytorch/parallel_torch.py +62 -0
- cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/__init__.py +48 -0
- cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_plan_flux.py +159 -0
- cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_plan_qwen_image.py +78 -0
- cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_plan_registers.py +58 -0
- cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_plan_wan.py +153 -0
- cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_planners.py +12 -0
- cache_dit/parallelism/parallel_backend.py +2 -0
- cache_dit/parallelism/parallel_config.py +8 -1
- cache_dit/parallelism/parallel_interface.py +9 -4
- cache_dit/quantize/backends/__init__.py +1 -0
- cache_dit/quantize/backends/bitsandbytes/__init__.py +0 -0
- cache_dit/quantize/backends/torchao/__init__.py +1 -0
- cache_dit/quantize/{quantize_ao.py → backends/torchao/quantize_ao.py} +28 -9
- cache_dit/quantize/quantize_backend.py +0 -0
- cache_dit/quantize/quantize_config.py +0 -0
- cache_dit/quantize/quantize_interface.py +3 -16
- cache_dit/utils.py +22 -2
- {cache_dit-1.0.9.dist-info → cache_dit-1.0.10.dist-info}/METADATA +22 -13
- {cache_dit-1.0.9.dist-info → cache_dit-1.0.10.dist-info}/RECORD +45 -29
- /cache_dit/{custom_ops → kernels}/__init__.py +0 -0
- /cache_dit/{custom_ops → kernels}/triton_taylorseer.py +0 -0
- {cache_dit-1.0.9.dist-info → cache_dit-1.0.10.dist-info}/WHEEL +0 -0
- {cache_dit-1.0.9.dist-info → cache_dit-1.0.10.dist-info}/entry_points.txt +0 -0
- {cache_dit-1.0.9.dist-info → cache_dit-1.0.10.dist-info}/licenses/LICENSE +0 -0
- {cache_dit-1.0.9.dist-info → cache_dit-1.0.10.dist-info}/top_level.txt +0 -0
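Two layout changes stand out in the list above: `custom_ops/` was renamed to `kernels/`, and `quantize_ao.py` now lives under a new `backends/torchao/` package. For anyone who imported these internal modules directly, here is a hedged sketch of the assumed new import path (most users should keep going through the public quantize/cache interfaces instead):

```python
# Hedged sketch only: illustrates the new module layout implied by the file list above.
# These are internal modules; importing quantize_ao typically also requires the
# "quantization" extra (torchao) to be installed.

# 1.0.9 layout (old):
#   from cache_dit.quantize import quantize_ao
# 1.0.10 layout (new):
from cache_dit.quantize.backends.torchao import quantize_ao  # noqa: F401
```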

{cache_dit-1.0.9.dist-info → cache_dit-1.0.10.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cache_dit
-Version: 1.0.9
+Version: 1.0.10
 Summary: A Unified, Flexible and Training-free Cache Acceleration Framework for 🤗Diffusers.
 Author: DefTruth, vipshop.com, etc.
 Maintainer: DefTruth, vipshop.com, etc
@@ -14,6 +14,8 @@ Requires-Dist: pyyaml
 Requires-Dist: torch>=2.7.1
 Requires-Dist: diffusers>=0.35.1
 Requires-Dist: transformers>=4.55.2
+Provides-Extra: parallelism
+Requires-Dist: einops>=0.8.1; extra == "parallelism"
 Provides-Extra: quantization
 Requires-Dist: torchao>=0.12.0; extra == "quantization"
 Provides-Extra: metrics
@@ -36,6 +38,7 @@ Requires-Dist: opencv-python-headless; extra == "dev"
 Requires-Dist: ftfy; extra == "dev"
 Requires-Dist: scikit-image; extra == "dev"
 Provides-Extra: all
+Requires-Dist: cache-dit[parallelism]; extra == "all"
 Requires-Dist: cache-dit[quantization]; extra == "all"
 Requires-Dist: cache-dit[metrics]; extra == "all"
 Dynamic: license-file
@@ -49,16 +52,16 @@ Dynamic: requires-python
 A <b>Unified</b>, Flexible and Training-free <b>Cache Acceleration</b> Framework for <b>🤗Diffusers</b> <br>
 ♥️ Cache Acceleration with <b>One-line</b> Code ~ ♥️ <br>
 🔥<a href="./docs/User_Guide.md">Forward Pattern Matching</a> | <a href="./docs/User_Guide.md">Automatic Block Adapter</a>🔥 <br>
-🔥<a href="./docs/User_Guide.md"><b>DBCache</b></a> | <a href="./docs/User_Guide.md"><b>DBPrune</b></a> | <a href="./docs/User_Guide.md">Hybrid <b>TaylorSeer</b> Calibrator</a> | <a href="./docs/User_Guide.md">Cache CFG</a>🔥<br>
-🔥<a href="./docs/User_Guide.md"><b>Context Parallelism</b></a> | <a href="./docs/User_Guide.md">
+🔥<a href="./docs/User_Guide.md"><b>DBCache</b></a> | <a href="./docs/User_Guide.md"><b>DBPrune</b></a> | <a href="./docs/User_Guide.md">Hybrid <b>TaylorSeer</b> Calibrator</a> | <a href="./docs/User_Guide.md">Hybrid <b>Cache CFG</b></a>🔥<br>
+🔥<a href="./docs/User_Guide.md"><b>Context Parallelism</b></a> | <a href="./docs/User_Guide.md"><b>Tensor Parallelism</b></a> | <a href="./docs/User_Guide.md">Low-bits Quantization</a>🔥<br>
+🔥<a href="./docs/User_Guide.md">Compile Compatible</a> | <a href="./docs/User_Guide.md"><b>🎉State-of-the-Art Performance</b></a>🎉
 </p>
 <div align='center'>
 <img src=https://img.shields.io/badge/Language-Python-brightgreen.svg >
-<img src=https://img.shields.io/badge/PRs-welcome-blue.svg >
 <img src=https://img.shields.io/badge/PyPI-pass-brightgreen.svg >
-<img src=https://static.pepy.tech/badge/cache-dit
+<a href="https://pepy.tech/projects/cache-dit"><img src=https://static.pepy.tech/personalized-badge/cache-dit?period=total&units=INTERNATIONAL_SYSTEM&left_color=GRAY&right_color=GREEN&left_text=downloads></a>
+<img src=https://img.shields.io/github/issues/vipshop/cache-dit.svg >
 <img src=https://img.shields.io/github/stars/vipshop/cache-dit.svg?style=dark >
-<img src=https://img.shields.io/badge/Release-v1.0-brightgreen.svg >
 </div>
 <p align="center">
 🎉Now, <b>cache-dit</b> covers almost <b>All</b> Diffusers' <b>DiT</b> Pipelines🎉<br>
@@ -167,10 +170,11 @@
 
 </details>
 
-## 🔥Hightlight <a href="https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit"><img src=https://img.shields.io/badge/🤗Diffusers-ecosystem-yellow.svg ></a>
+## 🔥Hightlight <a href="https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit"><img src=https://img.shields.io/badge/🤗Diffusers-ecosystem-yellow.svg ></a> <a href="https://hellogithub.com/repository/vipshop/cache-dit" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=b8b03b3b32a449ea84cfc2b96cd384f3&claim_uid=ofSCbzTmdeQk3FD&theme=small" alt="Featured|HelloGitHub" /></a> <a href="https://pypi.org/project/cache-dit/"><img src=https://img.shields.io/pypi/dm/cache-dit.svg ></a> <img src=https://img.shields.io/badge/Models-30+-hotpink.svg > <img src=https://img.shields.io/badge/Pipelines-~100+-hotpink.svg >
+
 
 We are excited to announce that the **first API-stable version (v1.0.0)** of cache-dit has finally been released!
-**[cache-dit](https://github.com/vipshop/cache-dit)** is a **Unified**, **Flexible**, and **Training-free** cache acceleration framework for 🤗 Diffusers, enabling cache acceleration with just **one line** of code. Key features: **Unified Cache APIs**, **Forward Pattern Matching**, **Automatic Block Adapter**, **DBCache**, **DBPrune**, **Hybrid TaylorSeer Calibrator**, **Hybrid Cache CFG**, **Context Parallelism**, **Torch Compile Compatible** and **🎉SOTA** performance.
+**[cache-dit](https://github.com/vipshop/cache-dit)** is a **Unified**, **Flexible**, and **Training-free** cache acceleration framework for 🤗 Diffusers, enabling cache acceleration with just **one line** of code. Key features: **Unified Cache APIs**, **Forward Pattern Matching**, **Automatic Block Adapter**, **DBCache**, **DBPrune**, **Hybrid TaylorSeer Calibrator**, **Hybrid Cache CFG**, **Context Parallelism**, **Tensor Parallelism**, **Torch Compile Compatible** and **🎉SOTA** performance.
 
 ```bash
 pip3 install -U cache-dit # pip3 install git+https://github.com/vipshop/cache-dit.git
@@ -193,7 +197,7 @@ You can install the stable release of cache-dit from PyPI, or the latest develop
 - **[🎉Easy New Model Integration](./docs/User_Guide.md#automatic-block-adapter)**: Features like **Unified Cache APIs**, **Forward Pattern Matching**, **Automatic Block Adapter**, **Hybrid Forward Pattern**, and **Patch Functor** make it highly functional and flexible. For example, we achieved 🎉 Day 1 support for [HunyuanImage-2.1](https://github.com/Tencent-Hunyuan/HunyuanImage-2.1) with 1.7x speedup w/o precision loss—even before it was available in the Diffusers library.
 - **[🎉State-of-the-Art Performance](./bench/)**: Compared with algorithms including Δ-DiT, Chipmunk, FORA, DuCa, TaylorSeer and FoCa, cache-dit achieved the **SOTA** performance w/ **7.4x↑🎉** speedup on ClipScore!
 - **[🎉Support for 4/8-Steps Distilled Models](./bench/)**: Surprisingly, cache-dit's **DBCache** works for extremely few-step distilled models—something many other methods fail to do.
-- **[🎉Compatibility with Other Optimizations](./docs/User_Guide.md#️torch-compile)**: Designed to work seamlessly with torch.compile, Quantization ([torchao](./examples/quantize/), [🔥nunchaku](./examples/quantize/)), CPU or Sequential Offloading, **[🔥Context Parallelism](./docs/User_Guide.md/#️hybrid-context-parallelism)**, Tensor Parallelism
+- **[🎉Compatibility with Other Optimizations](./docs/User_Guide.md#️torch-compile)**: Designed to work seamlessly with torch.compile, Quantization ([torchao](./examples/quantize/), [🔥nunchaku](./examples/quantize/)), CPU or Sequential Offloading, **[🔥Context Parallelism](./docs/User_Guide.md/#️hybrid-context-parallelism)**, **[🔥Tensor Parallelism](./docs/User_Guide.md#️hybrid-tensor-parallelism)**, etc.
 - **[🎉Hybrid Cache Acceleration](./docs/User_Guide.md#taylorseer-calibrator)**: Now supports hybrid **Block-wise Cache + Calibrator** schemes (e.g., DBCache or DBPrune + TaylorSeerCalibrator). DBCache or DBPrune acts as the **Indicator** to decide *when* to cache, while the Calibrator decides *how* to cache. More mainstream cache acceleration algorithms (e.g., FoCa) will be supported in the future, along with additional benchmarks—stay tuned for updates!
 - **[🤗Diffusers Ecosystem Integration](https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit)**: 🔥**cache-dit** has joined the Diffusers community ecosystem as the **first** DiT-specific cache acceleration framework! Check out the documentation here: <a href="https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit"><img src=https://img.shields.io/badge/🤗Diffusers-ecosystem-yellow.svg ></a>
 
@@ -201,6 +205,8 @@ You can install the stable release of cache-dit from PyPI, or the latest develop
 
 ## 🔥Important News
 
+- 2025.10.28: 🔥Day 1 support [**LongCat-Video**](https://huggingface.co/meituan-longcat/LongCat-Video) with cache acceleration, ~**1.7x↑🎉**, 📚[Example](https://github.com/vipshop/cache-dit/blob/main/examples/pipeline/run_longcat_video.py).
+- 2025.10.28: 🎉Currently, **cache-dit** has been recommended by [<a href="https://hellogithub.com/repository/vipshop/cache-dit" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=b8b03b3b32a449ea84cfc2b96cd384f3&claim_uid=ofSCbzTmdeQk3FD&theme=small" alt="Featured|HelloGitHub" /></a>](https://hellogithub.com/repository/vipshop/cache-dit) 🔥
 - 2025.10.23: 🎉Now cache-dit supported the [Kandinsky5 T2V](https://github.com/ai-forever/Kandinsky-5) and [Photoroom/PRX](https://github.com/huggingface/diffusers/pull/12456) pipelines.
 - 2025.10.20: 🔥Now cache-dit supported the **[Hybrid Cache + Context Parallelism](./docs/User_Guide.md/#️hybrid-context-parallelism)** scheme!🔥
 - 2025.10.16: 🎉cache-dit + [**🔥nunchaku 4-bits**](https://github.com/nunchaku-tech/nunchaku) supported: [Qwen-Image-Lightning 4/8 steps](./examples/quantize/).
@@ -232,7 +238,7 @@ You can install the stable release of cache-dit from PyPI, or the latest develop
 
 <div id="user-guide"></div>
 
-For more advanced features such as **Unified Cache APIs**, **Forward Pattern Matching**, **Automatic Block Adapter**, **Hybrid Forward Pattern**, **Patch Functor**, **DBCache**, **TaylorSeer Calibrator**,
+For more advanced features such as **Unified Cache APIs**, **Forward Pattern Matching**, **Automatic Block Adapter**, **Hybrid Forward Pattern**, **Patch Functor**, **DBCache**, **DBPrune**, **TaylorSeer Calibrator**, **Hybrid Cache CFG**, **Context Parallelism** and **Tensor Parallelism**, please refer to the [🎉User_Guide.md](./docs/User_Guide.md) for details.
 
 - [⚙️Installation](./docs/User_Guide.md#️installation)
 - [🔥Benchmarks](./docs/User_Guide.md#benchmarks)
@@ -243,12 +249,15 @@ For more advanced features such as **Unified Cache APIs**, **Forward Pattern Mat
 - [🔥Automatic Block Adapter](./docs/User_Guide.md#automatic-block-adapter)
 - [📚Hybrid Forward Pattern](./docs/User_Guide.md#hybrid-forward-pattern)
 - [📚Implement Patch Functor](./docs/User_Guide.md#implement-patch-functor)
+- [📚Transformer-Only Interface](./docs/User_Guide.md#transformer-only-interface)
 - [🤖Cache Acceleration Stats](./docs/User_Guide.md#cache-acceleration-stats-summary)
 - [⚡️DBCache: Dual Block Cache](./docs/User_Guide.md#️dbcache-dual-block-cache)
 - [⚡️DBPrune: Dynamic Block Prune](./docs/User_Guide.md#️dbprune-dynamic-block-prune)
 - [⚡️Hybrid Cache CFG](./docs/User_Guide.md#️hybrid-cache-cfg)
 - [🔥Hybrid TaylorSeer Calibrator](./docs/User_Guide.md#taylorseer-calibrator)
-- [⚡️Hybrid Context Parallelism](./docs/User_Guide.md#context-
+- [⚡️Hybrid Context Parallelism](./docs/User_Guide.md#context-parallelism)
+- [⚡️Hybrid Tensor Parallelism](./docs/User_Guide.md#tensor-parallelism)
+- [🤖Low-bits Quantization](./docs/User_Guide.md#quantization)
 - [🛠Metrics Command Line](./docs/User_Guide.md#metrics-cli)
 - [⚙️Torch Compile](./docs/User_Guide.md#️torch-compile)
 - [📚API Documents](./docs/User_Guide.md#api-documentation)
@@ -271,7 +280,7 @@ How to contribute? Star ⭐️ this repo to support us or check [CONTRIBUTE.md](
 
 ## 🎉Projects Using CacheDiT
 
-Here is a curated list of open-source projects integrating **CacheDiT**, including popular repositories like [jetson-containers](https://github.com/dusty-nv/jetson-containers/blob/master/packages/diffusion/cache_edit/build.sh) , [flux-fast](https://github.com/huggingface/flux-fast) , and [sdnext](https://github.com/vladmandic/sdnext/discussions/4269) .
+Here is a curated list of open-source projects integrating **CacheDiT**, including popular repositories like [jetson-containers](https://github.com/dusty-nv/jetson-containers/blob/master/packages/diffusion/cache_edit/build.sh) , [flux-fast](https://github.com/huggingface/flux-fast) , and [sdnext](https://github.com/vladmandic/sdnext/discussions/4269) . 🎉**CacheDiT** has also been **recommended** by many famous projects: [Wan2.2](https://github.com/Wan-Video/Wan2.2) , [Qwen-Image-Lightning](https://github.com/ModelTC/Qwen-Image-Lightning) , [Qwen-Image](https://github.com/QwenLM/Qwen-Image) , [LongCat-Video](https://github.com/meituan-longcat/LongCat-Video) , <a href="https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit"><img src="https://img.shields.io/badge/🤗Diffusers-ecosystem-yellow.svg"></a> , [<a href="https://hellogithub.com/repository/vipshop/cache-dit" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=b8b03b3b32a449ea84cfc2b96cd384f3&claim_uid=ofSCbzTmdeQk3FD&theme=small" alt="Featured|HelloGitHub" /></a>](https://hellogithub.com/repository/vipshop/cache-dit) , among others.
 
 ## ©️Acknowledgements
 
@@ -292,7 +301,7 @@ Special thanks to vipshop's Computer Vision AI Team for supporting document, tes
 title={cache-dit: A Unified, Flexible and Training-free Cache Acceleration Framework for Diffusers.},
 url={https://github.com/vipshop/cache-dit.git},
 note={Open-source software available at https://github.com/vipshop/cache-dit.git},
-author={vipshop.com},
+author={DefTruth, vipshop.com},
 year={2025}
 }
 ```
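The README excerpt in the METADATA diff above advertises cache acceleration with one line of code, and this release adds a new `parallelism` extra. As a rough, hedged sketch of what that looks like in practice (based on the usage shown in the project README; the pipeline checkpoint is only an illustrative choice, and the exact `enable_cache` options may differ between releases):

```python
# pip3 install -U "cache-dit[parallelism]"   # the new extra declared in this release
import torch
from diffusers import DiffusionPipeline

import cache_dit

# Any Diffusers DiT pipeline supported by cache-dit; FLUX.1-dev is just an example here.
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to("cuda")

# The advertised "one-line" cache acceleration entry point.
cache_dit.enable_cache(pipe)

image = pipe("a cup of coffee on a wooden table").images[0]
image.save("coffee.png")
```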

{cache_dit-1.0.9.dist-info → cache_dit-1.0.10.dist-info}/RECORD
@@ -1,19 +1,19 @@
 cache_dit/__init__.py,sha256=Azqj-3QMQK4HZDTGgyUtAfatUwuU-YQ4w8erJSyrsbE,2082
-cache_dit/_version.py,sha256=
+cache_dit/_version.py,sha256=tHFvhjm1Ch2x5K0Hyz16rJbtaKyru5Di9vC_Puo-s9w,706
 cache_dit/logger.py,sha256=0zsu42hN-3-rgGC_C29ms1IvVpV4_b4_SwJCKSenxBE,4304
-cache_dit/utils.py,sha256=
+cache_dit/utils.py,sha256=xlrjGHD-JepiAumGsxTKF7racsylEZL59_scf5y4e24,19295
 cache_dit/cache_factory/.gitignore,sha256=5Cb-qT9wsTUoMJ7vACDF7ZcLpAXhi5v-xdcWSRit988,23
-cache_dit/cache_factory/__init__.py,sha256=
-cache_dit/cache_factory/cache_interface.py,sha256=
+cache_dit/cache_factory/__init__.py,sha256=9b29gAb3Nd2Fpj-XvoMcbOMki4LwhvdBUk5GjWqXczQ,1921
+cache_dit/cache_factory/cache_interface.py,sha256=BQgN7Z4FiYKHZCjZMz8G11hYlD3WRBqPQ0ZQxyxK1xQ,18248
 cache_dit/cache_factory/cache_types.py,sha256=QnWfaS52UOXQtnoCUOwwz4ziY0dyBta6vQ6hvgtdV44,1404
-cache_dit/cache_factory/forward_pattern.py,sha256=
+cache_dit/cache_factory/forward_pattern.py,sha256=DAnldDC-B_FOMK8REtX2hx8mLZ9GLe1UWYfkxzyyMgo,2197
 cache_dit/cache_factory/params_modifier.py,sha256=2T98IbepAolWW6GwQsqUDsRzu0k65vo7BOrN3V8mKog,3606
 cache_dit/cache_factory/utils.py,sha256=S3SD6Zhexzhkqnmfo830v6oNLm8stZe32nF4VdxD_bA,2497
-cache_dit/cache_factory/block_adapters/__init__.py,sha256=
-cache_dit/cache_factory/block_adapters/block_adapters.py,sha256=
-cache_dit/cache_factory/block_adapters/block_registers.py,sha256=
+cache_dit/cache_factory/block_adapters/__init__.py,sha256=K3fQPgHW_Zoz8sjYwQlD_O8jHlo4ib4XBAh12yK7-n0,21122
+cache_dit/cache_factory/block_adapters/block_adapters.py,sha256=VJoGHUgQfWuwCY4YidvNiFMk7UQ5o3RmfgRCuePW948,25285
+cache_dit/cache_factory/block_adapters/block_registers.py,sha256=V3VJr6P0mOi49F_dWIXq_n6UkQLObZJYfLsbHVoLX28,3833
 cache_dit/cache_factory/cache_adapters/__init__.py,sha256=py71WGD3JztQ1uk6qdLVbzYcQ1rvqFidNNaQYo7tqTo,79
-cache_dit/cache_factory/cache_adapters/cache_adapter.py,sha256=
+cache_dit/cache_factory/cache_adapters/cache_adapter.py,sha256=knMNq02aHCucGqub3XGqNYyTcmdtTCB6xHrh2eYmZ6w,25720
 cache_dit/cache_factory/cache_blocks/__init__.py,sha256=cpxzmDcUhbXcReHqaKSnWyEEbIg1H91Pz5hE3z9Xj3k,9984
 cache_dit/cache_factory/cache_blocks/offload_utils.py,sha256=wusgcqaCrwEjvv7Guy-6VXhNOgPPUrBV2sSVuRmGuvo,3513
 cache_dit/cache_factory/cache_blocks/pattern_0_1_2.py,sha256=j4bTafqU5DLQhzP_X5XwOk-QUVLWkGrX-Q6JZvBGHh0,666
@@ -21,13 +21,13 @@ cache_dit/cache_factory/cache_blocks/pattern_3_4_5.py,sha256=2qPnXVZwpQIm2oJ-Yrn
 cache_dit/cache_factory/cache_blocks/pattern_base.py,sha256=uNcPZU8b8i_-re_X1xBHkSDQSacQO7Fa69vjbfAYxOA,25275
 cache_dit/cache_factory/cache_blocks/pattern_utils.py,sha256=qOxoVTlYPQzPMrR06-7_Ce_lwNg6n5pt1KQrvxzAJhE,3124
 cache_dit/cache_factory/cache_contexts/__init__.py,sha256=7uY8fX9uhpC71VNm1HH4aDIicYn-dD3kRpPQhvc9-EI,853
-cache_dit/cache_factory/cache_contexts/cache_config.py,sha256=
+cache_dit/cache_factory/cache_contexts/cache_config.py,sha256=UAJpxGj5DxRAcAqSeNeKF4YCj6a2hzkQ-Hj0_UOggBE,5953
 cache_dit/cache_factory/cache_contexts/cache_context.py,sha256=fjZMEHaT1DZvUKnzY41GP0Ep8tmPEZTOsCSvG-5it5k,11269
-cache_dit/cache_factory/cache_contexts/cache_manager.py,sha256=
-cache_dit/cache_factory/cache_contexts/context_manager.py,sha256=
+cache_dit/cache_factory/cache_contexts/cache_manager.py,sha256=6Orzs8prjNkCPRpsLiS_viJVbWCK8PQkT9OxC61X8zY,35398
+cache_dit/cache_factory/cache_contexts/context_manager.py,sha256=ajxAk9YvtmKZMtPcyVfx5DeGVDtCnHgNqPq2udpaazU,1077
 cache_dit/cache_factory/cache_contexts/prune_config.py,sha256=WMTh6zb480a0oJiYMlgI0cwCsDSVvs6UjyeJLiXbjP8,3216
 cache_dit/cache_factory/cache_contexts/prune_context.py,sha256=ywiT9P0w_GjIFLowzUDa6jhTohNsSGfTbanZcs9wMic,6359
-cache_dit/cache_factory/cache_contexts/prune_manager.py,sha256=
+cache_dit/cache_factory/cache_contexts/prune_manager.py,sha256=u5z9SBLg-Ui1Um2Z80HlEsovG0b-Q_0MoPeyH4rtFXk,6123
 cache_dit/cache_factory/cache_contexts/calibrators/__init__.py,sha256=QTbyT8xcFEjfIp9xjbnsnlnVCNvMjUc20NjB0W-s95k,6269
 cache_dit/cache_factory/cache_contexts/calibrators/base.py,sha256=mn6ZBkChGpGwN5csrHTUGMoX6BBPvqHXSLbIExiW-EU,748
 cache_dit/cache_factory/cache_contexts/calibrators/foca.py,sha256=nhHGs_hxwW1M942BQDMJb9-9IuHdnOxp774Jrna1bJI,891
@@ -42,8 +42,8 @@ cache_dit/cache_factory/patch_functors/functor_hunyuan_dit.py,sha256=iSo5dD5uKnj
 cache_dit/cache_factory/patch_functors/functor_qwen_image_controlnet.py,sha256=D5i1Rrq1FQ49liupLcV2DW04moBqLnW9TICzfnMMzIU,10519
 cache_dit/compile/__init__.py,sha256=FcTVzCeyypl-mxlc59_ehHL3lBNiDAFsXuRoJ-5Cfi0,56
 cache_dit/compile/utils.py,sha256=nN2OIrSdwRR5zGxJinKDqb07pXpvTNTF3g_OgLkeeBU,3858
-cache_dit/
-cache_dit/
+cache_dit/kernels/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cache_dit/kernels/triton_taylorseer.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cache_dit/metrics/__init__.py,sha256=Y_JrBr9XE6NKXwyXc7d_-PaX9c_rk5FKms-IYgCyHmY,936
 cache_dit/metrics/clip_score.py,sha256=ERNCFQFJKzJdbIX9OAg-1LiSPuXUVHLOFxbf2gcENpc,3938
 cache_dit/metrics/config.py,sha256=ieOgD9ayz722RjVzk24bSIqS2D6o7TZjGk8KeXV-OLQ,551
@@ -53,18 +53,34 @@ cache_dit/metrics/inception.py,sha256=pBVe2X6ylLPIXTG4-GWDM9DWnCviMJbJ45R3ulhktR
 cache_dit/metrics/lpips.py,sha256=hrHrmdM-f2B4TKDs0xLqJO5JFaYcCjq2qNIR8oCrVkc,811
 cache_dit/metrics/metrics.py,sha256=AZbQyoavE-djvyRUZ_EfCIrWSQbiWQFo7n2dhn7XptE,40466
 cache_dit/parallelism/__init__.py,sha256=dheBG5_TZCuwctviMslpAEgB-B3N8F816bE51qsw_fU,210
-cache_dit/parallelism/parallel_backend.py,sha256=
-cache_dit/parallelism/parallel_config.py,sha256=
-cache_dit/parallelism/parallel_interface.py,sha256=
-cache_dit/parallelism/backends/native_diffusers/__init__.py,sha256=
-cache_dit/parallelism/backends/native_diffusers/parallel_difffusers.py,sha256=
-cache_dit/parallelism/backends/
+cache_dit/parallelism/parallel_backend.py,sha256=GWpFrBjvRMRmAZYr1uek15-8aKDKKKQplGYENvbuSrg,576
+cache_dit/parallelism/parallel_config.py,sha256=rsPeyTvSjteKPWQanm_AgGKofAPS63zZzPZUeUPoxzk,2446
+cache_dit/parallelism/parallel_interface.py,sha256=wKZE-p2j89be7Y0MUD1IALBNBBL5QPtHGvHF1y1PgWg,2443
+cache_dit/parallelism/backends/native_diffusers/__init__.py,sha256=5woYGGhTi1XM-briRLSEIF5uNfYSMoY2W0wPQS-6iaY,114
+cache_dit/parallelism/backends/native_diffusers/parallel_difffusers.py,sha256=CY53v3lc36o8HXdc42xc5c0wsKdpDiswNDM3kLAuW-0,1607
+cache_dit/parallelism/backends/native_diffusers/utils.py,sha256=VAqtv9b8PTvcoYzD_CbvtRgdg9_VbtBug_5L38PADl0,266
+cache_dit/parallelism/backends/native_diffusers/context_parallelism/__init__.py,sha256=OT-CTUwBmWMErnVhiwZdwwYE67FOmWImtDXIrlxu8pU,3946
+cache_dit/parallelism/backends/native_diffusers/context_parallelism/cp_plan_registers.py,sha256=RMkr3fp7zTfYao_F-t8imDiTg9JIjiHPEsVfCLmy-mE,2392
+cache_dit/parallelism/backends/native_diffusers/context_parallelism/cp_planners.py,sha256=Z-wzi2VB8Gl8cqLSU9knGmrOJkGuXE2wzGWpO_j1Ubs,10186
+cache_dit/parallelism/backends/native_pytorch/__init__.py,sha256=qd2zxfi1zGoo-A_x5oBa7F6ZM0TvvZTRhKUCblOCckg,107
+cache_dit/parallelism/backends/native_pytorch/parallel_torch.py,sha256=1A-Gdv6JRueZNpwJdrVxRT7TBxwo2Bn-CwMbnlhJx0M,2047
+cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/__init__.py,sha256=M-nEf6a22UeoIjZEhIajpUpGSQzWiNn_zmWiBNU70Fs,1662
+cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_plan_flux.py,sha256=1hSGbM84yBCfA2vv7smXLLfo69VmyAT_irgPi4Hs7Zo,6028
+cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_plan_qwen_image.py,sha256=AcRCfML0bwGeVY0gnVfMQtW9GAWc5wO4ZhxWGNDCkKc,2644
+cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_plan_registers.py,sha256=0mw3yJLjdFsnMscH-k-J23xdlJhlBtly3RQuVPAPOQA,1893
+cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_plan_wan.py,sha256=j8w5gZn3MhSsacxOVguss85GSI6bdLL61FMySDZKwJc,5191
+cache_dit/parallelism/backends/native_pytorch/tensor_parallelism/tp_planners.py,sha256=0rSroX_pVSFFWujdAqirLo3Gl6WZe-LFUtikhkOqprA,462
 cache_dit/quantize/__init__.py,sha256=rUu0V9VRjOgwXuIUHHAI-osivNjAdUsi-jpkDbFp6Gk,278
-cache_dit/quantize/
-cache_dit/quantize/
-cache_dit
-cache_dit
-cache_dit
-cache_dit
-cache_dit
-cache_dit-1.0.
+cache_dit/quantize/quantize_backend.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cache_dit/quantize/quantize_config.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cache_dit/quantize/quantize_interface.py,sha256=dKLrLF-FDdRLQq-3CnaRzkAh70P4oObJE3-qWF7goM0,882
+cache_dit/quantize/backends/__init__.py,sha256=SL9EupOwBRzRcHZBI1ABqdHjCS9vEpFZXjA9R5ikTk8,33
+cache_dit/quantize/backends/bitsandbytes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cache_dit/quantize/backends/torchao/__init__.py,sha256=NK1widhJeKSx8ICCcyYZAWGUpGc2uKF5O7pLIcSCUDI,37
+cache_dit/quantize/backends/torchao/quantize_ao.py,sha256=VKxmr1c4n0yw7-1Rf4yKF2hXPdAkwQwNInDB43QcuiA,6917
+cache_dit-1.0.10.dist-info/licenses/LICENSE,sha256=Dqb07Ik2dV41s9nIdMUbiRWEfDqo7-dQeRiY7kPO8PE,3769
+cache_dit-1.0.10.dist-info/METADATA,sha256=YKa04RRkeRPHKtXuT8vfMyeJZDhz28vklpk6aMrmjUI,32329
+cache_dit-1.0.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+cache_dit-1.0.10.dist-info/entry_points.txt,sha256=FX2gysXaZx6NeK1iCLMcIdP8Q4_qikkIHtEmi3oWn8o,65
+cache_dit-1.0.10.dist-info/top_level.txt,sha256=ZJDydonLEhujzz0FOkVbO-BqfzO9d_VqRHmZU-3MOZo,10
+cache_dit-1.0.10.dist-info/RECORD,,