cache-dit 1.0.6__py3-none-any.whl → 1.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- cache_dit/_version.py +2 -2
- cache_dit/cache_factory/block_adapters/__init__.py +44 -0
- cache_dit/cache_factory/block_adapters/block_registers.py +1 -0
- cache_dit/cache_factory/cache_blocks/pattern_base.py +3 -8
- cache_dit/parallelism/backends/parallel_difffusers.py +20 -3
- cache_dit/utils.py +4 -0
- {cache_dit-1.0.6.dist-info → cache_dit-1.0.7.dist-info}/METADATA +12 -14
- {cache_dit-1.0.6.dist-info → cache_dit-1.0.7.dist-info}/RECORD +12 -12
- {cache_dit-1.0.6.dist-info → cache_dit-1.0.7.dist-info}/WHEEL +0 -0
- {cache_dit-1.0.6.dist-info → cache_dit-1.0.7.dist-info}/entry_points.txt +0 -0
- {cache_dit-1.0.6.dist-info → cache_dit-1.0.7.dist-info}/licenses/LICENSE +0 -0
- {cache_dit-1.0.6.dist-info → cache_dit-1.0.7.dist-info}/top_level.txt +0 -0
cache_dit/_version.py
CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '1.0.6'
-__version_tuple__ = version_tuple = (1, 0, 6)
+__version__ = version = '1.0.7'
+__version_tuple__ = version_tuple = (1, 0, 7)
 
 __commit_id__ = commit_id = None
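The `_version.py` change is the routine version bump. A quick way to confirm which release is installed, using only the stdlib and the module shown above (whether `cache_dit/__init__.py` re-exports `__version__` is not visible in this diff):

```python
# Read the installed version two ways: importlib.metadata inspects the wheel's
# METADATA, while cache_dit._version holds the values edited in the hunk above.
from importlib.metadata import version

print(version("cache_dit"))  # expected: 1.0.7 after upgrading

from cache_dit._version import __version__, version_tuple
print(__version__, version_tuple)  # '1.0.7' (1, 0, 7)
```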
cache_dit/cache_factory/block_adapters/__init__.py
CHANGED
@@ -577,3 +577,47 @@ def hunyuanditpag_adapter(pipe, **kwargs) -> BlockAdapter:
         patch_functor=HunyuanDiTPatchFunctor(),
         **kwargs,
     )
+
+
+@BlockAdapterRegistry.register("Kandinsky5")
+def kandinsky5_adapter(pipe, **kwargs) -> BlockAdapter:
+    try:
+        from diffusers import Kandinsky5Transformer3DModel
+
+        assert isinstance(pipe.transformer, Kandinsky5Transformer3DModel)
+        return BlockAdapter(
+            pipe=pipe,
+            transformer=pipe.transformer,
+            blocks=pipe.transformer.visual_transformer_blocks,
+            forward_pattern=ForwardPattern.Pattern_3,  # or Pattern_2
+            has_separate_cfg=True,
+            check_forward_pattern=False,
+            check_num_outputs=False,
+            **kwargs,
+        )
+    except ImportError:
+        raise ImportError(
+            "Kandinsky5Transformer3DModel is not available in the current diffusers version. "
+            "Please upgrade diffusers>=0.36.dev0 to use this adapter."
+        )
+
+
+@BlockAdapterRegistry.register("PRX")
+def prx_adapter(pipe, **kwargs) -> BlockAdapter:
+    try:
+        from diffusers import PRXTransformer2DModel
+
+        assert isinstance(pipe.transformer, PRXTransformer2DModel)
+        return BlockAdapter(
+            pipe=pipe,
+            transformer=pipe.transformer,
+            blocks=pipe.transformer.blocks,
+            forward_pattern=ForwardPattern.Pattern_3,
+            check_num_outputs=False,
+            **kwargs,
+        )
+    except ImportError:
+        raise ImportError(
+            "PRXTransformer2DModel is not available in the current diffusers version. "
+            "Please upgrade diffusers>=0.36.dev0 to use this adapter."
+        )
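The two registrations above are the main addition in this release: built-in adapters for Kandinsky 5 (a 3D/video transformer registered with separate CFG handling) and PRX pipelines. A minimal usage sketch, assuming cache-dit's documented one-line `cache_dit.enable_cache(pipe)` API; the checkpoint id below is a placeholder rather than a real repository, and a diffusers build that ships `Kandinsky5Transformer3DModel` (>= 0.36.dev0 per the error message above) is required:

```python
# Hedged sketch: the "Kandinsky5" registry entry is matched against pipe.transformer,
# so a plain enable_cache() call should pick it up once diffusers >= 0.36.dev0 is installed.
import torch
from diffusers import DiffusionPipeline
import cache_dit

pipe = DiffusionPipeline.from_pretrained(
    "your-org/kandinsky5-t2v-checkpoint",  # placeholder model id
    torch_dtype=torch.bfloat16,
).to("cuda")

cache_dit.enable_cache(pipe)  # one-line cache acceleration, per the project README

result = pipe(prompt="a cat surfing a wave at sunset")
```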
cache_dit/cache_factory/cache_blocks/pattern_base.py
CHANGED
@@ -139,14 +139,9 @@ class CachedBlocks_Pattern_Base(torch.nn.Module):
                 *args,
                 **kwargs,
             )
-
-            hidden_states, encoder_hidden_states
-
-            hidden_states, encoder_hidden_states = (
-                encoder_hidden_states,
-                hidden_states,
-            )
-
+            hidden_states, encoder_hidden_states = self._process_block_outputs(
+                hidden_states, encoder_hidden_states
+            )
         return hidden_states, encoder_hidden_states
 
     @torch.compiler.disable
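The deleted lines show inline handling of block outputs: unpack a returned tuple and swap the pair back into (hidden_states, encoder_hidden_states) order; 1.0.7 funnels this through a single `self._process_block_outputs` call instead. A standalone, hypothetical sketch of a helper with that shape, inferred only from the deleted lines (the `return_hidden_first` flag is an assumed stand-in for cache-dit's forward-pattern check, not its actual attribute):

```python
import torch
from typing import Tuple, Union

def process_block_outputs_sketch(
    hidden_states: Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]],
    encoder_hidden_states: torch.Tensor,
    return_hidden_first: bool = True,  # assumed stand-in for the forward-pattern ordering check
) -> Tuple[torch.Tensor, torch.Tensor]:
    # Some transformer blocks return a (tensor, tensor) pair instead of a bare tensor.
    if not isinstance(hidden_states, torch.Tensor):
        hidden_states, encoder_hidden_states = hidden_states
        # Mirror the deleted inline swap: normalize to (hidden, encoder) order.
        if not return_hidden_first:
            hidden_states, encoder_hidden_states = encoder_hidden_states, hidden_states
    return hidden_states, encoder_hidden_states
```

Centralizing the swap keeps the per-block loop short and gives subclasses one place to override output handling.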
cache_dit/parallelism/backends/parallel_difffusers.py
CHANGED
@@ -1,6 +1,10 @@
 import torch
 
 from typing import Optional
+from cache_dit.logger import init_logger
+
+logger = init_logger(__name__)
+
 
 try:
     from diffusers import ContextParallelConfig
@@ -24,10 +28,18 @@ def maybe_enable_parallelism(
     transformer: torch.nn.Module,
     parallelism_config: Optional[ParallelismConfig],
 ) -> torch.nn.Module:
-    assert isinstance(transformer, ModelMixin)
+    assert isinstance(transformer, ModelMixin), (
+        "transformer must be an instance of diffusers' ModelMixin, "
+        f"but got {type(transformer)}"
+    )
     if parallelism_config is None:
         return transformer
 
+    assert isinstance(parallelism_config, ParallelismConfig), (
+        "parallelism_config must be an instance of ParallelismConfig"
+        f" but got {type(parallelism_config)}"
+    )
+
     if (
         parallelism_config.backend == ParallelismBackend.NATIVE_DIFFUSER
         and native_diffusers_parallelism_available()
@@ -43,10 +55,15 @@ def maybe_enable_parallelism(
         )
         if cp_config is not None:
             if hasattr(transformer, "enable_parallelism"):
-                if hasattr(transformer, "set_attention_backend"):
+                if hasattr(transformer, "set_attention_backend"):
                     # Now only _native_cudnn is supported for parallelism
                     # issue: https://github.com/huggingface/diffusers/pull/12443
-                    transformer.set_attention_backend("_native_cudnn")
+                    transformer.set_attention_backend("_native_cudnn")
+                    logger.warning(
+                        "Set attention backend to _native_cudnn for parallelism because of "
+                        "the issue: https://github.com/huggingface/diffusers/pull/12443"
+                    )
+
                 transformer.enable_parallelism(config=cp_config)
             else:
                 raise ValueError(
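Net effect of the `parallel_difffusers.py` hunks: type mismatches now fail with messages that name the offending type, and the forced `_native_cudnn` attention backend is logged instead of being switched silently. An illustrative snippet, assuming the module path listed in RECORD is imported directly (cache-dit may also expose this through a higher-level interface not shown here):

```python
# Illustration only: how the new assertion message surfaces to a caller that
# passes something other than a diffusers ModelMixin. The toy module below is
# deliberately not a real pipeline component.
import torch
from cache_dit.parallelism.backends.parallel_difffusers import maybe_enable_parallelism

class NotAModelMixin(torch.nn.Module):
    pass

try:
    maybe_enable_parallelism(NotAModelMixin(), parallelism_config=None)
except AssertionError as err:
    # 1.0.6 raised a bare AssertionError; 1.0.7 reports something like:
    # "transformer must be an instance of diffusers' ModelMixin, but got <class '__main__.NotAModelMixin'>"
    print(err)
```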
cache_dit/utils.py
CHANGED
@@ -183,6 +183,8 @@ def strify(
     cached_steps = None
     cache_type = cache_options.get("cache_type", CacheType.NONE)
 
+    stats = None
+
     if cache_type == CacheType.NONE:
         return "NONE"
     else:
@@ -217,6 +219,8 @@ def strify(
         return "T0O0"
 
     def parallelism_str():
+        if stats is None:
+            return ""
         parallelism_config: ParallelismConfig = stats.parallelism_config
         if parallelism_config is not None:
             return f"_{parallelism_config.strify()}"
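The `strify` change gives `stats` a `None` default and lets the nested `parallelism_str()` return an empty suffix when no stats were collected. A compact, generic illustration of that guard pattern (not cache-dit's actual `strify`, whose other branches are outside this diff):

```python
# Toy version of the guard: the nested helper reads `stats` from the enclosing
# scope, so a None default plus an early return keeps the summary string
# well-formed when no cache/parallelism stats exist.
def summarize(stats=None) -> str:
    def parallelism_str() -> str:
        if stats is None:  # mirrors the added guard
            return ""
        cfg = stats.get("parallelism_config")
        return f"_{cfg}" if cfg else ""

    base = "NONE" if stats is None else "DBCACHE"
    return base + parallelism_str()

print(summarize())                               # -> NONE
print(summarize({"parallelism_config": "cp2"}))  # -> DBCACHE_cp2
```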
{cache_dit-1.0.6.dist-info → cache_dit-1.0.7.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cache_dit
-Version: 1.0.6
+Version: 1.0.7
 Summary: A Unified, Flexible and Training-free Cache Acceleration Framework for 🤗Diffusers.
 Author: DefTruth, vipshop.com, etc.
 Maintainer: DefTruth, vipshop.com, etc
@@ -45,17 +45,15 @@ Dynamic: provides-extra
 Dynamic: requires-dist
 Dynamic: requires-python
 
-
+📚English | <a href="./README_CN.md">📚中文阅读 </a>
 
 <div align="center">
 <img src=https://github.com/vipshop/cache-dit/raw/main/assets/cache-dit-logo.png height="120">
 <p align="center">
 A <b>Unified</b>, Flexible and Training-free <b>Cache Acceleration</b> Framework for <b>🤗Diffusers</b> <br>
-♥️ Cache Acceleration with <b>One-line</b> Code ~ ♥️
-
-
-🔥<b><a href="./docs/User_Guide.md">DBCache</a> | <a href="./docs/User_Guide.md">DBPrune</a> | <a href="./docs/User_Guide.md">Hybird TaylorSeer</a> | <a href="./docs/User_Guide.md">Hybird Cache CFG</a></b>🔥 <br>
-🔥<b><a href="./docs/User_Guide.md">Hybrid Context Paralleism</a> | <a href="./docs/User_Guide.md">PyTorch Native</a> | <a href="./docs/User_Guide.md">SOTA</a></b>🔥
+♥️ Cache Acceleration with <b>One-line</b> Code ~ ♥️ <br>
+🔥<b><a href="./docs/User_Guide.md">DBCache</a> | <a href="./docs/User_Guide.md">DBPrune</a> | <a href="./docs/User_Guide.md">Hybrid TaylorSeer</a> | <a href="./docs/User_Guide.md">Hybrid Cache CFG</a></b>🔥 <br>
+🔥<b><a href="./docs/User_Guide.md">Hybrid Context Paralleism</a> | <a href="./docs/User_Guide.md">Diffusers Native</a> | <a href="./docs/User_Guide.md">SOTA</a></b>🔥
 </p>
 <div align='center'>
 <img src=https://img.shields.io/badge/Language-Python-brightgreen.svg >
@@ -198,7 +196,7 @@ You can install the stable release of cache-dit from PyPI, or the latest develop
 - **[🎉Easy New Model Integration](./docs/User_Guide.md#automatic-block-adapter)**: Features like **Unified Cache APIs**, **Forward Pattern Matching**, **Automatic Block Adapter**, **Hybrid Forward Pattern**, and **Patch Functor** make it highly functional and flexible. For example, we achieved 🎉 Day 1 support for [HunyuanImage-2.1](https://github.com/Tencent-Hunyuan/HunyuanImage-2.1) with 1.7x speedup w/o precision loss—even before it was available in the Diffusers library.
 - **[🎉State-of-the-Art Performance](./bench/)**: Compared with algorithms including Δ-DiT, Chipmunk, FORA, DuCa, TaylorSeer and FoCa, cache-dit achieved the **SOTA** performance w/ **7.4x↑🎉** speedup on ClipScore!
 - **[🎉Support for 4/8-Steps Distilled Models](./bench/)**: Surprisingly, cache-dit's **DBCache** works for extremely few-step distilled models—something many other methods fail to do.
-- **[🎉Compatibility with Other Optimizations](./docs/User_Guide.md#️torch-compile)**: Designed to work seamlessly with torch.compile,
+- **[🎉Compatibility with Other Optimizations](./docs/User_Guide.md#️torch-compile)**: Designed to work seamlessly with torch.compile, Quantization ([torchao](./examples/quantize/), [🔥nunchaku](./examples/quantize/)), CPU or Sequential Offloading, **[🔥Context Parallelism](./docs/User_Guide.md/#️hybrid-context-parallelism)**, Tensor Parallelism, etc.
 - **[🎉Hybrid Cache Acceleration](./docs/User_Guide.md#taylorseer-calibrator)**: Now supports hybrid **Block-wise Cache + Calibrator** schemes (e.g., DBCache or DBPrune + TaylorSeerCalibrator). DBCache or DBPrune acts as the **Indicator** to decide *when* to cache, while the Calibrator decides *how* to cache. More mainstream cache acceleration algorithms (e.g., FoCa) will be supported in the future, along with additional benchmarks—stay tuned for updates!
 - **[🤗Diffusers Ecosystem Integration](https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit)**: 🔥**cache-dit** has joined the Diffusers community ecosystem as the **first** DiT-specific cache acceleration framework! Check out the documentation here: <a href="https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit"><img src=https://img.shields.io/badge/🤗Diffusers-ecosystem-yellow.svg ></a>
 
@@ -206,14 +204,12 @@ You can install the stable release of cache-dit from PyPI, or the latest develop
 
 ## 🔥Important News
 
-- 2025.10.20: 🔥Now cache-dit supported the [Hybrid Cache + Context Parallelism](./docs/User_Guide.md/#️hybrid-context-parallelism) scheme!🔥
+- 2025.10.20: 🔥Now cache-dit supported the **[Hybrid Cache + Context Parallelism](./docs/User_Guide.md/#️hybrid-context-parallelism)** scheme!🔥
 - 2025.10.16: 🎉cache-dit + [**🔥nunchaku 4-bits**](https://github.com/nunchaku-tech/nunchaku) supported: [Qwen-Image-Lightning 4/8 steps](./examples/quantize/).
 - 2025.10.15: 🎉cache-dit now supported [**🔥nunchaku**](https://github.com/nunchaku-tech/nunchaku): Qwen-Image/FLUX.1 [4-bits examples](./examples/quantize/)
 - 2025.10.13: 🎉cache-dit achieved the **SOTA** performance w/ **7.4x↑🎉** speedup on ClipScore!
 - 2025.10.10: 🔥[**Qwen-Image-ControlNet-Inpainting**](https://huggingface.co/InstantX/Qwen-Image-ControlNet-Inpainting) **2.3x↑🎉** speedup! Check the [example](https://github.com/vipshop/cache-dit/blob/main/examples/pipeline/run_qwen_image_controlnet_inpaint.py).
 - 2025.09.26: 🔥[**Qwen-Image-Edit-Plus(2509)**](https://github.com/QwenLM/Qwen-Image) **2.1x↑🎉** speedup! Please check the [example](https://github.com/vipshop/cache-dit/blob/main/examples/pipeline/run_qwen_image_edit_plus.py).
-- 2025.09.25: 🎉The **first API-stable version (v1.0.0)** of cache-dit has finally been released!
-- 2025.09.25: 🔥**cache-dit** has joined the Diffusers community ecosystem: <a href="https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit"><img src=https://img.shields.io/badge/🤗Diffusers-ecosystem-yellow.svg ></a>
 - 2025.09.10: 🎉Day 1 support [**HunyuanImage-2.1**](https://github.com/Tencent-Hunyuan/HunyuanImage-2.1) with **1.7x↑🎉** speedup! Check this [example](https://github.com/vipshop/cache-dit/blob/main/examples/pipeline/run_hunyuan_image_2.1.py).
 - 2025.09.08: 🔥[**Qwen-Image-Lightning**](https://github.com/vipshop/cache-dit/blob/main/examples/pipeline/run_qwen_image_lightning.py) **7.1/3.5 steps🎉** inference with **[DBCache: F16B16](https://github.com/vipshop/cache-dit)**.
 - 2025.09.03: 🎉[**Wan2.2-MoE**](https://github.com/Wan-Video) **2.4x↑🎉** speedup! Please refer to [run_wan_2.2.py](https://github.com/vipshop/cache-dit/blob/main/examples/pipeline/run_wan_2.2.py) as an example.
@@ -223,6 +219,8 @@ You can install the stable release of cache-dit from PyPI, or the latest develop
 <details>
 <summary>Previous News</summary>
 
+- 2025.09.25: 🎉The **first API-stable version (v1.0.0)** of cache-dit has finally been released!
+- 2025.09.25: 🔥**cache-dit** has joined the Diffusers community ecosystem: <a href="https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit"><img src=https://img.shields.io/badge/🤗Diffusers-ecosystem-yellow.svg ></a>
 - 2025.09.08: 🎉First caching mechanism in [Wan2.2](https://github.com/Wan-Video/Wan2.2) with **[cache-dit](https://github.com/vipshop/cache-dit)**, check this [PR](https://github.com/Wan-Video/Wan2.2/pull/127) for more details.
 - 2025.09.08: 🎉First caching mechanism in [Qwen-Image-Lightning](https://github.com/ModelTC/Qwen-Image-Lightning) with **[cache-dit](https://github.com/vipshop/cache-dit)**, check this [PR](https://github.com/ModelTC/Qwen-Image-Lightning/pull/35).
 - 2025.08.10: 🔥[**FLUX.1-Kontext-dev**](https://huggingface.co/black-forest-labs/FLUX.1-Kontext-dev) is supported! Please refer [run_flux_kontext.py](https://github.com/vipshop/cache-dit/blob/main/examples/pipeline/run_flux_kontext.py) as an example.
@@ -245,13 +243,13 @@ For more advanced features such as **Unified Cache APIs**, **Forward Pattern Mat
 - [📚Forward Pattern Matching](./docs/User_Guide.md#forward-pattern-matching)
 - [📚Cache with One-line Code](./docs/User_Guide.md#%EF%B8%8Fcache-acceleration-with-one-line-code)
 - [🔥Automatic Block Adapter](./docs/User_Guide.md#automatic-block-adapter)
-- [📚
+- [📚Hybrid Forward Pattern](./docs/User_Guide.md#hybrid-forward-pattern)
 - [📚Implement Patch Functor](./docs/User_Guide.md#implement-patch-functor)
 - [🤖Cache Acceleration Stats](./docs/User_Guide.md#cache-acceleration-stats-summary)
 - [⚡️DBCache: Dual Block Cache](./docs/User_Guide.md#️dbcache-dual-block-cache)
 - [⚡️DBPrune: Dynamic Block Prune](./docs/User_Guide.md#️dbprune-dynamic-block-prune)
-- [🔥Hybrid TaylorSeer](./docs/User_Guide.md#taylorseer-calibrator)
 - [⚡️Hybrid Cache CFG](./docs/User_Guide.md#️hybrid-cache-cfg)
+- [🔥Hybrid TaylorSeer Calibrator](./docs/User_Guide.md#taylorseer-calibrator)
 - [⚡️Hybrid Context Parallelism](./docs/User_Guide.md#context-paralleism)
 - [🛠Metrics Command Line](./docs/User_Guide.md#metrics-cli)
 - [⚙️Torch Compile](./docs/User_Guide.md#️torch-compile)
@@ -275,7 +273,7 @@ How to contribute? Star ⭐️ this repo to support us or check [CONTRIBUTE.md](
 
 ## 🎉Projects Using CacheDiT
 
-Here is a curated list of open-source projects integrating **CacheDiT**, including popular repositories like [jetson-containers](https://github.com/dusty-nv/jetson-containers/blob/master/packages/diffusion/cache_edit/build.sh) , [flux-fast](https://github.com/huggingface/flux-fast) , and [sdnext](https://github.com/vladmandic/sdnext/
+Here is a curated list of open-source projects integrating **CacheDiT**, including popular repositories like [jetson-containers](https://github.com/dusty-nv/jetson-containers/blob/master/packages/diffusion/cache_edit/build.sh) , [flux-fast](https://github.com/huggingface/flux-fast) , and [sdnext](https://github.com/vladmandic/sdnext/discussions/4269) . **CacheDiT** has also been **recommended** by [Wan2.2](https://github.com/Wan-Video/Wan2.2) , [Qwen-Image-Lightning](https://github.com/ModelTC/Qwen-Image-Lightning) , [Qwen-Image](https://github.com/QwenLM/Qwen-Image) , and <a href="https://huggingface.co/docs/diffusers/main/en/optimization/cache_dit"><img src="https://img.shields.io/badge/🤗Diffusers-ecosystem-yellow.svg"></a> , among others. We would be grateful if you could let us know if you have used CacheDiT.
 
 ## ©️Acknowledgements
{cache_dit-1.0.6.dist-info → cache_dit-1.0.7.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
 cache_dit/__init__.py,sha256=HZb04M7AHCfk9DaEAGApGJ2lCM-rsP6pbsNQxsQudi0,1743
-cache_dit/_version.py,sha256=
+cache_dit/_version.py,sha256=xUX1oSOk6hTPREy9SfhUBjaOBMJucMgoQViQ3e2Ce9A,704
 cache_dit/logger.py,sha256=0zsu42hN-3-rgGC_C29ms1IvVpV4_b4_SwJCKSenxBE,4304
-cache_dit/utils.py,sha256=
+cache_dit/utils.py,sha256=3NcEb324fNY0NYnrBTjsLURKQuckKeFe3V9Dfc_g4sc,17851
 cache_dit/cache_factory/.gitignore,sha256=5Cb-qT9wsTUoMJ7vACDF7ZcLpAXhi5v-xdcWSRit988,23
 cache_dit/cache_factory/__init__.py,sha256=5UjrpxLVlmjHttTL0O14fD5oU5uKI3FKYevL613ibFQ,1848
 cache_dit/cache_factory/cache_interface.py,sha256=244uTVx83hpCpbCDgEOydi5HqG7hKHHzEoz1ApJW6lI,14627
@@ -9,16 +9,16 @@ cache_dit/cache_factory/cache_types.py,sha256=QnWfaS52UOXQtnoCUOwwz4ziY0dyBta6vQ
 cache_dit/cache_factory/forward_pattern.py,sha256=FumlCuZ-TSmSYH0hGBHctSJ-oGLCftdZjLygqhsmdR4,2258
 cache_dit/cache_factory/params_modifier.py,sha256=2T98IbepAolWW6GwQsqUDsRzu0k65vo7BOrN3V8mKog,3606
 cache_dit/cache_factory/utils.py,sha256=S3SD6Zhexzhkqnmfo830v6oNLm8stZe32nF4VdxD_bA,2497
-cache_dit/cache_factory/block_adapters/__init__.py,sha256=
+cache_dit/cache_factory/block_adapters/__init__.py,sha256=eeBcWUMIvS-x3GcD1LNesW2SuB9V5mtwG9MoUBWHsL8,19765
 cache_dit/cache_factory/block_adapters/block_adapters.py,sha256=2TVK_KqiYXC7AKZ2s07fzdOzUoeUBc9P1SzQtLVzhf4,22249
-cache_dit/cache_factory/block_adapters/block_registers.py,sha256=
+cache_dit/cache_factory/block_adapters/block_registers.py,sha256=KU0cqtLYRlij2WvuQ6erqZbxUWkb6DjvmY_sB3o_fQM,2594
 cache_dit/cache_factory/cache_adapters/__init__.py,sha256=py71WGD3JztQ1uk6qdLVbzYcQ1rvqFidNNaQYo7tqTo,79
 cache_dit/cache_factory/cache_adapters/cache_adapter.py,sha256=WYrgV3DKxOxttl-wEKymyKIB1Po0eW73Q2_vOlGEKdQ,24080
 cache_dit/cache_factory/cache_blocks/__init__.py,sha256=cpxzmDcUhbXcReHqaKSnWyEEbIg1H91Pz5hE3z9Xj3k,9984
 cache_dit/cache_factory/cache_blocks/offload_utils.py,sha256=wusgcqaCrwEjvv7Guy-6VXhNOgPPUrBV2sSVuRmGuvo,3513
 cache_dit/cache_factory/cache_blocks/pattern_0_1_2.py,sha256=j4bTafqU5DLQhzP_X5XwOk-QUVLWkGrX-Q6JZvBGHh0,666
 cache_dit/cache_factory/cache_blocks/pattern_3_4_5.py,sha256=2qPnXVZwpQIm2oJ-Yrn3Avqi3BcXtE2133jPIL_LhK8,19595
-cache_dit/cache_factory/cache_blocks/pattern_base.py,sha256=
+cache_dit/cache_factory/cache_blocks/pattern_base.py,sha256=uNcPZU8b8i_-re_X1xBHkSDQSacQO7Fa69vjbfAYxOA,25275
 cache_dit/cache_factory/cache_blocks/pattern_utils.py,sha256=qOxoVTlYPQzPMrR06-7_Ce_lwNg6n5pt1KQrvxzAJhE,3124
 cache_dit/cache_factory/cache_contexts/__init__.py,sha256=7uY8fX9uhpC71VNm1HH4aDIicYn-dD3kRpPQhvc9-EI,853
 cache_dit/cache_factory/cache_contexts/cache_config.py,sha256=G0PVWgckDqeyARc72Ne_0lRtO_LftsOeMERRhbh2gCA,5739
@@ -56,13 +56,13 @@ cache_dit/parallelism/__init__.py,sha256=dheBG5_TZCuwctviMslpAEgB-B3N8F816bE51qs
 cache_dit/parallelism/parallel_backend.py,sha256=js1soTMenLeAyPMsBgdI3gWcdXoqjWgBD-PuFEywMr0,508
 cache_dit/parallelism/parallel_config.py,sha256=bu24sRSzJMmH7FZqzUPTcT6tAzQ20-FAqAEvGV3Q1Fw,1733
 cache_dit/parallelism/parallel_interface.py,sha256=tsiIdHosTmRbeRg0z9q0eMQlx-7vefmSIlc56OWnuMg,2205
-cache_dit/parallelism/backends/parallel_difffusers.py,sha256=
+cache_dit/parallelism/backends/parallel_difffusers.py,sha256=YQkCJ1yq1OomZLyRLtGMaPSNWbDeAWGx9XuObVJ_85I,2499
 cache_dit/quantize/__init__.py,sha256=kWYoMAyZgBXu9BJlZjTQ0dRffW9GqeeY9_iTkXrb70A,59
 cache_dit/quantize/quantize_ao.py,sha256=bbEUwsrMp3bMuRw8qJZREIvCHaJRQoZyfMjlu4ImRMI,6315
 cache_dit/quantize/quantize_interface.py,sha256=2s_R7xPSKuJeFpEGeLwRxnq_CqJcBG3a3lzyW5wh-UM,1241
-cache_dit-1.0.
-cache_dit-1.0.
-cache_dit-1.0.
-cache_dit-1.0.
-cache_dit-1.0.
-cache_dit-1.0.
+cache_dit-1.0.7.dist-info/licenses/LICENSE,sha256=Dqb07Ik2dV41s9nIdMUbiRWEfDqo7-dQeRiY7kPO8PE,3769
+cache_dit-1.0.7.dist-info/METADATA,sha256=I0Vb-ZqUHblKOWwXyCyZVfcllq1lLm7ML2X7U6TJs4s,29475
+cache_dit-1.0.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+cache_dit-1.0.7.dist-info/entry_points.txt,sha256=FX2gysXaZx6NeK1iCLMcIdP8Q4_qikkIHtEmi3oWn8o,65
+cache_dit-1.0.7.dist-info/top_level.txt,sha256=ZJDydonLEhujzz0FOkVbO-BqfzO9d_VqRHmZU-3MOZo,10
+cache_dit-1.0.7.dist-info/RECORD,,
Files without changes (moved from cache_dit-1.0.6.dist-info to cache_dit-1.0.7.dist-info): WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt.