cache-dit 0.2.28__py3-none-any.whl → 0.2.29__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


cache_dit/cache_factory/cache_interface.py CHANGED
@@ -1,9 +1,11 @@
+ import torch
  from typing import Any, Tuple, List
  from diffusers import DiffusionPipeline
  from cache_dit.cache_factory.cache_types import CacheType
  from cache_dit.cache_factory.block_adapters import BlockAdapter
  from cache_dit.cache_factory.block_adapters import BlockAdapterRegistry
  from cache_dit.cache_factory.cache_adapters import CachedAdapter
+ from cache_dit.cache_factory.cache_contexts import CachedContextManager
 
  from cache_dit.logger import init_logger
 
@@ -12,7 +14,7 @@ logger = init_logger(__name__)
 
  def enable_cache(
      # DiffusionPipeline or BlockAdapter
-     pipe_or_adapter: DiffusionPipeline | BlockAdapter | Any,
+     pipe_or_adapter: DiffusionPipeline | BlockAdapter,
      # Cache context kwargs
      Fn_compute_blocks: int = 8,
      Bn_compute_blocks: int = 0,
@@ -30,7 +32,7 @@ def enable_cache(
      taylorseer_cache_type: str = "residual",
      taylorseer_order: int = 2,
      **other_cache_context_kwargs,
- ) -> DiffusionPipeline | Any:
+ ) -> BlockAdapter:
      r"""
      Unified Cache API for almost Any Diffusion Transformers (with Transformer Blocks
      that match the specific Input and Output patterns).
@@ -129,16 +131,9 @@ def enable_cache(
      cache_context_kwargs["taylorseer_cache_type"] = taylorseer_cache_type
      cache_context_kwargs["taylorseer_order"] = taylorseer_order
 
-     if isinstance(pipe_or_adapter, BlockAdapter):
-         return CachedAdapter.apply(
-             pipe=None,
-             block_adapter=pipe_or_adapter,
-             **cache_context_kwargs,
-         )
-     elif isinstance(pipe_or_adapter, DiffusionPipeline):
+     if isinstance(pipe_or_adapter, (DiffusionPipeline, BlockAdapter)):
          return CachedAdapter.apply(
-             pipe=pipe_or_adapter,
-             block_adapter=None,
+             pipe_or_adapter,
              **cache_context_kwargs,
          )
      else:
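After this change, both `DiffusionPipeline` and `BlockAdapter` inputs go through the same `CachedAdapter.apply(pipe_or_adapter, **cache_context_kwargs)` call. A minimal sketch of the two entry points; the model id and the `ForwardPattern` import path are illustrative assumptions, while the `BlockAdapter` fields mirror the README examples further down in this diff:

```python
# Hedged sketch: both call styles hit the same CachedAdapter.apply() path in 0.2.29.
# The model id and the ForwardPattern import path are assumptions, not taken from this diff.
import cache_dit
from cache_dit.cache_factory import BlockAdapter, ForwardPattern
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained("Qwen/Qwen-Image")

# 1) Pass the pipeline directly; the blocks are located via the adapter registry
#    and pattern matching.
cache_dit.enable_cache(pipe)

# 2) Or describe the transformer blocks explicitly via a BlockAdapter.
cache_dit.enable_cache(
    BlockAdapter(
        pipe=pipe,
        transformer=pipe.transformer,
        blocks=pipe.transformer.transformer_blocks,
        forward_pattern=ForwardPattern.Pattern_1,
    ),
)
```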
@@ -149,6 +144,81 @@ def enable_cache(
          )
 
 
+ def disable_cache(
+     # DiffusionPipeline or BlockAdapter
+     pipe_or_adapter: DiffusionPipeline | BlockAdapter,
+ ):
+     from cache_dit.cache_factory.cache_blocks.utils import (
+         remove_cached_stats,
+     )
+
+     def _disable_blocks(blocks: torch.nn.ModuleList):
+         if blocks is None:
+             return
+         if hasattr(blocks, "_forward_pattern"):
+             del blocks._forward_pattern
+         if hasattr(blocks, "_cache_context_kwargs"):
+             del blocks._cache_context_kwargs
+         remove_cached_stats(blocks)
+
+     def _disable_transformer(transformer: torch.nn.Module):
+         if transformer is None or not BlockAdapter.is_cached(transformer):
+             return
+         if original_forward := getattr(transformer, "_original_forward"):
+             transformer.forward = original_forward.__get__(transformer)
+             del transformer._original_forward
+         if hasattr(transformer, "_is_cached"):
+             del transformer._is_cached
+         if hasattr(transformer, "_forward_pattern"):
+             del transformer._forward_pattern
+         if hasattr(transformer, "_has_separate_cfg"):
+             del transformer._has_separate_cfg
+         if hasattr(transformer, "_cache_context_kwargs"):
+             del transformer._cache_context_kwargs
+         remove_cached_stats(transformer)
+         for blocks in BlockAdapter.find_blocks(transformer):
+             _disable_blocks(blocks)
+
+     def _disable_pipe(pipe: DiffusionPipeline):
+         if pipe is None or not BlockAdapter.is_cached(pipe):
+             return
+         if original_call := getattr(pipe, "_original_call"):
+             pipe.__class__.__call__ = original_call
+             del pipe.__class__._original_call
+         if cache_manager := getattr(pipe, "_cache_manager"):
+             assert isinstance(cache_manager, CachedContextManager)
+             cache_manager.clear_contexts()
+             del pipe._cache_manager
+         if hasattr(pipe, "_is_cached"):
+             del pipe.__class__._is_cached
+         if hasattr(pipe, "_cache_context_kwargs"):
+             del pipe._cache_context_kwargs
+         remove_cached_stats(pipe)
+
+     if isinstance(pipe_or_adapter, DiffusionPipeline):
+         pipe = pipe_or_adapter
+         _disable_pipe(pipe)
+         if hasattr(pipe, "transformer"):
+             _disable_transformer(pipe.transformer)
+         if hasattr(pipe, "transformer_2"): # Wan 2.2
+             _disable_transformer(pipe.transformer_2)
+         pipe_cls_name = pipe.__class__.__name__
+         logger.warning(f"Cache Acceleration is disabled for: {pipe_cls_name}")
+     elif isinstance(pipe_or_adapter, BlockAdapter):
+         # BlockAdapter
+         adapter = pipe_or_adapter
+         BlockAdapter.assert_normalized(adapter)
+         _disable_pipe(adapter.pipe)
+         for transformer in BlockAdapter.flatten(adapter.transformer):
+             _disable_transformer(transformer)
+         for blocks in BlockAdapter.flatten(adapter.blocks):
+             _disable_blocks(blocks)
+         pipe_cls_name = adapter.pipe.__class__.__name__
+         logger.warning(f"Cache Acceleration is disabled for: {pipe_cls_name}")
+     else:
+         pass # do nothing
+
+
  def supported_pipelines(
      **kwargs,
  ) -> Tuple[int, List[str]]:
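The new `disable_cache` entry point is the inverse of `enable_cache`: it restores the original `__call__`/`forward`, clears the `CachedContextManager` contexts, and strips the `_is_cached` / `_forward_pattern` / `_cache_context_kwargs` markers from the pipeline, its transformer(s) (including `transformer_2` for Wan 2.2), and their blocks. A minimal enable/run/disable round trip, mirroring the README snippet later in this diff; the model id and prompt are illustrative:

```python
# Hedged sketch of the enable/disable round trip added in 0.2.29.
import cache_dit
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained("Qwen/Qwen-Image")  # any DiT-based pipeline

cache_dit.enable_cache(pipe)        # patch pipe.__call__ / transformer.forward
cached = pipe("a cup of coffee")    # runs with cache acceleration

cache_dit.disable_cache(pipe)       # restore originals, clear cache contexts
baseline = pipe("a cup of coffee")  # runs the unpatched pipeline again
```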
cache_dit/cache_factory/patch_functors/functor_chroma.py CHANGED
@@ -30,7 +30,7 @@ class ChromaPatchFunctor(PatchFunctor):
          blocks: torch.nn.ModuleList = None,
          **kwargs,
      ) -> ChromaTransformer2DModel:
-         if getattr(transformer, "_is_patched", False):
+         if hasattr(transformer, "_is_patched"):
              return transformer
 
          if blocks is None:
cache_dit/cache_factory/patch_functors/functor_flux.py CHANGED
@@ -31,7 +31,7 @@ class FluxPatchFunctor(PatchFunctor):
          **kwargs,
      ) -> FluxTransformer2DModel:
 
-         if getattr(transformer, "_is_patched", False):
+         if hasattr(transformer, "_is_patched"):
              return transformer
 
          if blocks is None:
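Both patch functors now check `hasattr(transformer, "_is_patched")` instead of `getattr(transformer, "_is_patched", False)`, so the early return triggers as soon as the attribute exists, even if it was explicitly set to `False`. A standalone illustration of the difference (plain Python, not cache-dit code):

```python
# Standalone illustration of the changed guard in ChromaPatchFunctor / FluxPatchFunctor.
class Dummy:
    pass

t = Dummy()
t._is_patched = False  # attribute present, but falsy

print(getattr(t, "_is_patched", False))  # False -> old guard falls through and re-patches
print(hasattr(t, "_is_patched"))         # True  -> new guard returns the transformer as-is
```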
cache_dit/utils.py CHANGED
@@ -5,7 +5,8 @@ import numpy as np
  from pprint import pprint
  from diffusers import DiffusionPipeline
 
- from typing import Dict, Any
+ from typing import Dict, Any, List, Union
+ from cache_dit.cache_factory import BlockAdapter
  from cache_dit.logger import init_logger
 
 
@@ -29,9 +30,168 @@ class CacheStats:
 
 
  def summary(
-     pipe_or_module: DiffusionPipeline | torch.nn.Module | Any,
+     adapter_or_others: Union[
+         BlockAdapter,
+         DiffusionPipeline,
+         torch.nn.Module,
+     ],
      details: bool = False,
      logging: bool = True,
+     **kwargs,
+ ) -> List[CacheStats]:
+     if adapter_or_others is None:
+         return [CacheStats()]
+
+     if not isinstance(adapter_or_others, BlockAdapter):
+         if not isinstance(adapter_or_others, DiffusionPipeline):
+             transformer = adapter_or_others
+             transformer_2 = None
+         else:
+             transformer = adapter_or_others.transformer
+             transformer_2 = None
+             if hasattr(adapter_or_others, "transformer_2"):
+                 transformer_2 = adapter_or_others.transformer_2
+
+         blocks_stats: List[CacheStats] = []
+         for blocks in BlockAdapter.find_blocks(transformer):
+             blocks_stats.append(
+                 _summary(
+                     blocks,
+                     details=details,
+                     logging=logging,
+                     **kwargs,
+                 )
+             )
+
+         if transformer_2 is not None:
+             for blocks in BlockAdapter.find_blocks(transformer_2):
+                 blocks_stats.append(
+                     _summary(
+                         blocks,
+                         details=details,
+                         logging=logging,
+                         **kwargs,
+                     )
+                 )
+
+         blocks_stats.append(
+             _summary(
+                 transformer,
+                 details=details,
+                 logging=logging,
+                 **kwargs,
+             )
+         )
+         if transformer_2 is not None:
+             blocks_stats.append(
+                 _summary(
+                     transformer_2,
+                     details=details,
+                     logging=logging,
+                     **kwargs,
+                 )
+             )
+
+         blocks_stats = [stats for stats in blocks_stats if stats.cache_options]
+
+         return blocks_stats if len(blocks_stats) else [CacheStats()]
+
+     adapter = adapter_or_others
+     if not BlockAdapter.check_block_adapter(adapter):
+         return [CacheStats()]
+
+     blocks_stats = []
+     flatten_blocks = BlockAdapter.flatten(adapter.blocks)
+     for blocks in flatten_blocks:
+         blocks_stats.append(
+             _summary(
+                 blocks,
+                 details=details,
+                 logging=logging,
+                 **kwargs,
+             )
+         )
+
+     blocks_stats = [stats for stats in blocks_stats if stats.cache_options]
+
+     return blocks_stats if len(blocks_stats) else [CacheStats()]
+
+
+ def strify(
+     adapter_or_others: Union[
+         BlockAdapter,
+         DiffusionPipeline,
+         CacheStats,
+         List[CacheStats],
+         Dict[str, Any],
+     ],
+ ) -> str:
+     if isinstance(adapter_or_others, BlockAdapter):
+         stats = summary(adapter_or_others, logging=False)[-1]
+         cache_options = stats.cache_options
+         cached_steps = len(stats.cached_steps)
+     elif isinstance(adapter_or_others, DiffusionPipeline):
+         stats = summary(adapter_or_others, logging=False)[-1]
+         cache_options = stats.cache_options
+         cached_steps = len(stats.cached_steps)
+     elif isinstance(adapter_or_others, CacheStats):
+         stats = adapter_or_others
+         cache_options = stats.cache_options
+         cached_steps = len(stats.cached_steps)
+     elif isinstance(adapter_or_others, list):
+         stats = adapter_or_others[0]
+         cache_options = stats.cache_options
+         cached_steps = len(stats.cached_steps)
+     elif isinstance(adapter_or_others, dict):
+         from cache_dit.cache_factory import CacheType
+
+         # Assume cache_context_kwargs
+         cache_options = adapter_or_others
+         cached_steps = None
+         cache_type = cache_options.get("cache_type", CacheType.NONE)
+
+         if cache_type == CacheType.NONE:
+             return "NONE"
+     else:
+         raise ValueError(
+             "Please set pipe_or_stats param as one of: "
+             "DiffusionPipeline | CacheStats | Dict[str, Any]"
+         )
+
+     if not cache_options:
+         return "NONE"
+
+     def get_taylorseer_order():
+         taylorseer_order = 0
+         if "taylorseer_order" in cache_options:
+             taylorseer_order = cache_options["taylorseer_order"]
+         return taylorseer_order
+
+     cache_type_str = (
+         f"DBCACHE_F{cache_options.get('Fn_compute_blocks', 1)}"
+         f"B{cache_options.get('Bn_compute_blocks', 0)}_"
+         f"W{cache_options.get('max_warmup_steps', 0)}"
+         f"M{max(0, cache_options.get('max_cached_steps', -1))}"
+         f"MC{max(0, cache_options.get('max_continuous_cached_steps', -1))}_"
+         f"T{int(cache_options.get('enable_taylorseer', False))}"
+         f"O{get_taylorseer_order()}_"
+         f"R{cache_options.get('residual_diff_threshold', 0.08)}"
+     )
+
+     if cached_steps:
+         cache_type_str += f"_S{cached_steps}"
+
+     return cache_type_str
+
+
+ def _summary(
+     pipe_or_module: Union[
+         DiffusionPipeline,
+         torch.nn.Module,
+     ],
+     details: bool = False,
+     logging: bool = True,
+     **kwargs,
  ) -> CacheStats:
      cache_stats = CacheStats()
 
@@ -51,6 +211,8 @@ def summary(
          cache_stats.cache_options = cache_options
          if logging:
              print(f"\n🤗Cache Options: {cls_name}\n\n{cache_options}")
+     else:
+         logger.warning(f"Can't find Cache Options for: {cls_name}")
 
      if hasattr(module, "_cached_steps"):
          cached_steps: list[int] = module._cached_steps
@@ -141,56 +303,3 @@ def summary(
          )
 
      return cache_stats
-
-
- def strify(
-     pipe_or_stats: DiffusionPipeline | CacheStats | Dict[str, Any],
- ) -> str:
-     if isinstance(pipe_or_stats, DiffusionPipeline):
-         stats = summary(pipe_or_stats, logging=False)
-         cache_options = stats.cache_options
-         cached_steps = len(stats.cached_steps)
-     elif isinstance(pipe_or_stats, CacheStats):
-         stats = pipe_or_stats
-         cache_options = stats.cache_options
-         cached_steps = len(stats.cached_steps)
-     elif isinstance(pipe_or_stats, dict):
-         from cache_dit.cache_factory import CacheType
-
-         # Assume cache_context_kwargs
-         cache_options = pipe_or_stats
-         cached_steps = None
-         cache_type = cache_options.get("cache_type", CacheType.NONE)
-
-         if cache_type == CacheType.NONE:
-             return "NONE"
-     else:
-         raise ValueError(
-             "Please set pipe_or_stats param as one of: "
-             "DiffusionPipeline | CacheStats | Dict[str, Any]"
-         )
-
-     if not cache_options:
-         return "NONE"
-
-     def get_taylorseer_order():
-         taylorseer_order = 0
-         if "taylorseer_order" in cache_options:
-             taylorseer_order = cache_options["taylorseer_order"]
-         return taylorseer_order
-
-     cache_type_str = (
-         f"DBCACHE_F{cache_options.get('Fn_compute_blocks', 1)}"
-         f"B{cache_options.get('Bn_compute_blocks', 0)}_"
-         f"W{cache_options.get('max_warmup_steps', 0)}"
-         f"M{max(0, cache_options.get('max_cached_steps', -1))}"
-         f"MC{max(0, cache_options.get('max_continuous_cached_steps', -1))}_"
-         f"T{int(cache_options.get('enable_taylorseer', False))}"
-         f"O{get_taylorseer_order()}_"
-         f"R{cache_options.get('residual_diff_threshold', 0.08)}"
-     )
-
-     if cached_steps:
-         cache_type_str += f"_S{cached_steps}"
-
-     return cache_type_str
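With this refactor, `summary` accepts a `BlockAdapter`, a `DiffusionPipeline`, or a bare transformer module and returns a `List[CacheStats]` (one entry per cached blocks/transformer, with empty entries filtered out), while `strify` additionally accepts that list form. A hedged usage sketch; the helpers live in `cache_dit/utils.py`, and their re-export as `cache_dit.summary` / `cache_dit.strify` is an assumption:

```python
# Hedged sketch against the refactored summary()/strify(); top-level re-exports assumed.
import cache_dit

# `pipe` is a pipeline previously passed through cache_dit.enable_cache(...).
stats_list = cache_dit.summary(pipe, details=False, logging=True)  # -> List[CacheStats]

tag = cache_dit.strify(stats_list)  # also accepts a pipe, BlockAdapter, CacheStats, or dict
print(tag)
# e.g. "DBCACHE_F8B0_W8M0MC0_T1O4_R0.12_S23" decodes as:
#   F8 / B0   -> Fn_compute_blocks=8, Bn_compute_blocks=0
#   W8        -> max_warmup_steps=8
#   M0 / MC0  -> max_cached_steps / max_continuous_cached_steps (clamped to >= 0)
#   T1 / O4   -> enable_taylorseer=True, taylorseer_order=4
#   R0.12     -> residual_diff_threshold=0.12
#   _S23      -> 23 cached steps recorded in the last run
```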
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cache_dit
- Version: 0.2.28
+ Version: 0.2.29
  Summary: 🤗 A Unified and Training-free Cache Acceleration Toolbox for Diffusion Transformers
  Author: DefTruth, vipshop.com, etc.
  Maintainer: DefTruth, vipshop.com, etc
@@ -43,7 +43,7 @@ Dynamic: requires-python
  <div align="center">
  <img src=https://github.com/vipshop/cache-dit/raw/main/assets/cache-dit-logo.png height="120">
 
- <p align="center">
+ <p align="center">
  A <b>Unified</b> and Training-free <b>Cache Acceleration</b> Toolbox for <b>Diffusion Transformers</b> <br>
  ♥️ <b>Cache Acceleration</b> with <b>One-line</b> Code ~ ♥️
  </p>
@@ -63,13 +63,25 @@ Dynamic: requires-python
  🔥<b><a href="#supported">Qwen-Image</a> | <a href="#supported">FLUX.1</a> | <a href="#supported">Wan 2.1/2.2</a> | <a href="#supported"> ... </a> | <a href="#supported">CogVideoX</a></b>🔥
  </p>
  </div>
+ <div align='center'>
+ <img src=./assets/gifs/wan2.2.C0_Q0_NONE.gif width=160px>
+ <img src=./assets/gifs/wan2.2.C1_Q0_DBCACHE_F1B0_W2M8MC2_T1O2_R0.08.gif width=160px>
+ <img src=./assets/gifs/wan2.2.C1_Q1_fp8_w8a8_dq_DBCACHE_F1B0_W2M8MC2_T1O2_R0.08.gif width=160px>
+ <p><b>🔥Wan2.2 MoE</b> Baseline | <b><a href="https://github.com/vipshop/cache-dit">+cache-dit</a>:~2.0x↑🎉</b> | +FP8 DQ:<b>~2.4x↑🎉</b></p>
+ <img src=./assets/qwen-image.C0_Q0_NONE.png width=160px>
+ <img src=./assets/qwen-image.C1_Q0_DBCACHE_F8B0_W8M0MC0_T1O4_R0.12_S23.png width=160px>
+ <img src=./assets/qwen-image.C1_Q1_fp8_w8a8_dq_DBCACHE_F8B0_W8M0MC0_T1O4_R0.12_S18.png width=160px>
+ <p><b>🔥Qwen-Image</b> Baseline | <b><a href="https://github.com/vipshop/cache-dit">+cache-dit</a>:~1.8x↑🎉</b> | +FP8 DQ:<b>~2.2x↑🎉</b><br>♥️ Please consider to leave a <b>⭐️ Star</b> to support us ~ ♥️</p>
+ </p>
+ </div>
 
  ## 🔥News
 
- - [2025-09-03] 🎉[**Wan2.2-MoE**](https://github.com/Wan-Video) **2.4x⚡️** speedup! Please refer to [run_wan_2.2.py](./examples/pipeline/run_wan_2.2.py) as an example.
- - [2025-08-19] 🔥[**Qwen-Image-Edit**](https://github.com/QwenLM/Qwen-Image) **2x⚡️** speedup! Check the example: [run_qwen_image_edit.py](./examples/pipeline/run_qwen_image_edit.py).
+ - [2025-09-03] 🎉[**Wan2.2-MoE**](https://github.com/Wan-Video) **2.4x↑🎉** speedup! Please refer to [run_wan_2.2.py](./examples/pipeline/run_wan_2.2.py) as an example.
+ - [2025-08-19] 🔥[**Qwen-Image-Edit**](https://github.com/QwenLM/Qwen-Image) **2x↑🎉** speedup! Check the example: [run_qwen_image_edit.py](./examples/pipeline/run_qwen_image_edit.py).
  - [2025-08-12] 🎉First caching mechanism in [QwenLM/Qwen-Image](https://github.com/QwenLM/Qwen-Image) with **[cache-dit](https://github.com/vipshop/cache-dit)**, check this [PR](https://github.com/QwenLM/Qwen-Image/pull/61).
- - [2025-08-11] 🔥[**Qwen-Image**](https://github.com/QwenLM/Qwen-Image) **1.8x⚡️** speedup! Please refer to [run_qwen_image.py](./examples/pipeline/run_qwen_image.py) as an example.
+ - [2025-08-11] 🔥[**Qwen-Image**](https://github.com/QwenLM/Qwen-Image) **1.8x↑🎉** speedup! Please refer to [run_qwen_image.py](./examples/pipeline/run_qwen_image.py) as an example.
+ - [2025-07-13] 🎉[**FLUX.1-Dev**](https://github.com/xlite-dev/flux-faster) **3.3x↑🎉** speedup! NVIDIA L20 with **[cache-dit](https://github.com/vipshop/cache-dit)** + `compile + FP8 DQ`.
 
  <details>
  <summary> Previous News </summary>
@@ -78,7 +90,6 @@ Dynamic: requires-python
  - [2025-08-29] 🔥</b>Covers <b>100%</b> Diffusers' <b>DiT-based</b> Pipelines: **[BlockAdapter](#unified) + [Pattern Matching](#unified).**
  - [2025-08-10] 🔥[**FLUX.1-Kontext-dev**](https://huggingface.co/black-forest-labs/FLUX.1-Kontext-dev) is supported! Please refer [run_flux_kontext.py](./examples/pipeline/run_flux_kontext.py) as an example.
  - [2025-07-18] 🎉First caching mechanism in [🤗huggingface/flux-fast](https://github.com/huggingface/flux-fast) with **[cache-dit](https://github.com/vipshop/cache-dit)**, check the [PR](https://github.com/huggingface/flux-fast/pull/13).
- - [2025-07-13] **[🤗flux-faster](https://github.com/xlite-dev/flux-faster)** is released! **3.3x** speedup for FLUX.1 on NVIDIA L20 with **[cache-dit](https://github.com/vipshop/cache-dit)**.
 
  </details>
 
@@ -200,6 +211,9 @@ cache_dit.enable_cache(pipe)
 
  # Just call the pipe as normal.
  output = pipe(...)
+
+ # Disable cache and run original pipe.
+ cache_dit.disable_cache(pipe)
  ```
 
  ### 🔥Automatic Block Adapter
@@ -226,7 +240,6 @@ cache_dit.enable_cache(
          pipe=pipe, # Qwen-Image, etc.
          transformer=pipe.transformer,
          blocks=pipe.transformer.transformer_blocks,
-         blocks_name="transformer_blocks",
          forward_pattern=ForwardPattern.Pattern_1,
      ),
  )
@@ -248,10 +261,6 @@ cache_dit.enable_cache(
              pipe.transformer.transformer_blocks,
              pipe.transformer.single_transformer_blocks,
          ],
-         blocks_name=[
-             "transformer_blocks",
-             "single_transformer_blocks",
-         ],
          forward_pattern=[
              ForwardPattern.Pattern_1,
              ForwardPattern.Pattern_3,
@@ -457,11 +466,21 @@ cache-dit-metrics-cli all -i1 true_dir -i2 test_dir # image dir
 
  How to contribute? Star ⭐️ this repo to support us or check [CONTRIBUTE.md](./CONTRIBUTE.md).
 
- ## ©️License
+ <div align='center'>
+ <a href="https://star-history.com/#vipshop/cache-dit&Date">
+ <picture align='center'>
+ <source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=vipshop/cache-dit&type=Date&theme=dark" />
+ <source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=vipshop/cache-dit&type=Date" />
+ <img alt="Star History Chart" src="https://api.star-history.com/svg?repos=vipshop/cache-dit&type=Date" width=400px />
+ </picture>
+ </a>
+ </div>
+
+ ## ©️Acknowledgements
 
- <div id="license"></div>
+ <div id="Acknowledgements"></div>
 
- The **cache-dit** codebase is adapted from FBCache. Special thanks to their excellent work! We have followed the original License from FBCache, please check [LICENSE](./LICENSE) for more details.
+ The **cache-dit** codebase is adapted from FBCache. Over time its codebase diverged a lot, and **cache-dit** API is no longer compatible with FBCache.
 
  ## ©️Citations
 
@@ -1,30 +1,30 @@
- cache_dit/__init__.py,sha256=V4jCkTic4XvWojCUqYcjlvxiNM2DjGQbOLk6R-tAx2A,1191
- cache_dit/_version.py,sha256=hCl1MKhh249NDbigjeJY-mrKYvjPFbJ7oklAepBQrto,706
+ cache_dit/__init__.py,sha256=kX9V-FegZG4c8LMwI4PTmMqH794MEW0pzDArdhC0cJw,1241
+ cache_dit/_version.py,sha256=4_NDrwSRsA8gshfOOEHYB4RwOrbBlY3Re7Srt7YQl4M,706
  cache_dit/logger.py,sha256=0zsu42hN-3-rgGC_C29ms1IvVpV4_b4_SwJCKSenxBE,4304
- cache_dit/utils.py,sha256=pb5298XKmaZDoHwyteYRhixAG_0DGWrvvaObeShIhOM,7146
+ cache_dit/utils.py,sha256=bMeZw377_mACEj3nV1tn5DTqypBsbUVvZWJYjNQQxPg,10399
  cache_dit/cache_factory/.gitignore,sha256=5Cb-qT9wsTUoMJ7vACDF7ZcLpAXhi5v-xdcWSRit988,23
- cache_dit/cache_factory/__init__.py,sha256=M8q9furJOq2AZcLHRuCXZCjR9fNSELoEYdsCofIjMAo,1037
- cache_dit/cache_factory/cache_adapters.py,sha256=q7MxY44qw90h449Gr8W5iJjSwXPJR-YIyRmu_KJnQo0,13284
- cache_dit/cache_factory/cache_interface.py,sha256=2jcuTZ4D_P0M5pSw0z3BMPalobYen3YO1yKvRjaQjdQ,8332
+ cache_dit/cache_factory/__init__.py,sha256=Iw6-iJLFbdzCsIDZXXOw371L-HPmoeZO_P9a3sDjP5s,1103
+ cache_dit/cache_factory/cache_adapters.py,sha256=knNzV4BbCQmyiwsybFGl3LpTDEFtenykLb9-y_bbDpA,13905
+ cache_dit/cache_factory/cache_interface.py,sha256=wy9QNNNNH4ncdyGepuvyJSJLbaTRCqqlvxtO6Os20yA,11317
  cache_dit/cache_factory/cache_types.py,sha256=FIFa6ZBfvvSMMHyBBhvarvgg2Y2wbRgITcG_uGylGe0,991
  cache_dit/cache_factory/forward_pattern.py,sha256=FumlCuZ-TSmSYH0hGBHctSJ-oGLCftdZjLygqhsmdR4,2258
  cache_dit/cache_factory/utils.py,sha256=XkVM9AXcB9zYq8-S8QKAsGz80r3tA6U3lBNGDGeHOe4,1871
- cache_dit/cache_factory/block_adapters/__init__.py,sha256=UFuHxNR7Y0RZoCl97wn0u2d_2rj8PzNsWfzgda5AoKM,17395
- cache_dit/cache_factory/block_adapters/block_adapters.py,sha256=WsqGUDSDU_5-pIXwDqAK_k4a-4jgbFGoLCoF6kAjLt8,19198
- cache_dit/cache_factory/block_adapters/block_registers.py,sha256=99ouWioxldlZJYQWhcUkOu94f8vO9V9QGzVNhKWtyO4,2005
+ cache_dit/cache_factory/block_adapters/__init__.py,sha256=mtYPmsAYz4MGsMmanf6xZLaxZEkgE8gwB5mYhrM4nw4,15862
+ cache_dit/cache_factory/block_adapters/block_adapters.py,sha256=sRTAfhDxdj3VAm7ejyklGwM7HMimQSEgVroVkLx7CR8,20997
+ cache_dit/cache_factory/block_adapters/block_registers.py,sha256=79HpWTX7PO2ynY8I-KnF6pa-ETV4Dlpbxn5wvp_iyvw,2387
  cache_dit/cache_factory/cache_blocks/__init__.py,sha256=OWjnpJxA8EJVoRzuyb5miuiRphUFj831-bbtWsTDjnM,2750
  cache_dit/cache_factory/cache_blocks/pattern_0_1_2.py,sha256=ElMps6_7uI74tSF9GDR_dEI0bZEhdzcepM29xFWnYo8,428
  cache_dit/cache_factory/cache_blocks/pattern_3_4_5.py,sha256=CtBr6nvtAW8SAeEwPwiwWtPgrmwyb5ukb-j3IwFULJU,9953
  cache_dit/cache_factory/cache_blocks/pattern_base.py,sha256=XSDy3hsaKbAZPGZY92YgGA0qLgjQyIX8irQkb2R5T2c,20331
- cache_dit/cache_factory/cache_blocks/utils.py,sha256=wfreGvtK22hDnXuw0z0hUw-9ywu91FnExfPkP8ZzlkA,891
+ cache_dit/cache_factory/cache_blocks/utils.py,sha256=dGOC1tMMOvcbvEgx44eTESKn_jsv-0RZ3tRHPa3wmQ4,1315
  cache_dit/cache_factory/cache_contexts/__init__.py,sha256=rqnJ5__zqnpVHK5A1OqWILpNh5Ss-0ZDTGgtxZMKGGo,250
  cache_dit/cache_factory/cache_contexts/cache_context.py,sha256=N88WLdd4KE9DuMWmpX8URcF55E2zWNwcKMxgVYkxMJY,13691
- cache_dit/cache_factory/cache_contexts/cache_manager.py,sha256=wSghuX93KmCxE4pFEVKuyrO0Jt5STu_x4CxypS2EdxI,34276
+ cache_dit/cache_factory/cache_contexts/cache_manager.py,sha256=Tgk2-VFEhUp-oe-TFHzXay_YgbU8v90_Nx2G17ZlTds,34937
  cache_dit/cache_factory/cache_contexts/taylorseer.py,sha256=etSUIZzDvqW3ScKCbccTPcFaSmxV1T-xAXdk-p3e3wk,3802
  cache_dit/cache_factory/patch_functors/__init__.py,sha256=yK05iONMGILsTZ83ynrUUJtiJKJ_FDjxmVIzRLy416s,252
  cache_dit/cache_factory/patch_functors/functor_base.py,sha256=Ahk0fTfrHgNdEl-9JSkACvfyyv9G-Ei5OSz7XBIlX5o,357
- cache_dit/cache_factory/patch_functors/functor_chroma.py,sha256=IFCuFU8HCnohM9Qpij7oU_UG1T8Ok8uTI3B9Nw7BHfY,10029
- cache_dit/cache_factory/patch_functors/functor_flux.py,sha256=3Q8x-PEeDiUtuv-FnQ2qEzo4qbpsYOPB9uf7YridE88,9538
+ cache_dit/cache_factory/patch_functors/functor_chroma.py,sha256=Z0kPAib0TkXGzJIP9FRK559UlBVuGQSZIVFir6tHzJM,10022
+ cache_dit/cache_factory/patch_functors/functor_flux.py,sha256=w_QaUwv7l7ypvFxWHzjHjAafLr1PxQcgv5N7VFjr6N8,9531
  cache_dit/compile/__init__.py,sha256=FcTVzCeyypl-mxlc59_ehHL3lBNiDAFsXuRoJ-5Cfi0,56
  cache_dit/compile/utils.py,sha256=nN2OIrSdwRR5zGxJinKDqb07pXpvTNTF3g_OgLkeeBU,3858
  cache_dit/custom_ops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -39,9 +39,9 @@ cache_dit/quantize/__init__.py,sha256=kWYoMAyZgBXu9BJlZjTQ0dRffW9GqeeY9_iTkXrb70
  cache_dit/quantize/quantize_ao.py,sha256=mGspqYgQtenl3QnKPtsSYsSD7LbVX93f1M940bhXKLU,6066
  cache_dit/quantize/quantize_interface.py,sha256=2s_R7xPSKuJeFpEGeLwRxnq_CqJcBG3a3lzyW5wh-UM,1241
  cache_dit/quantize/quantize_svdq.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- cache_dit-0.2.28.dist-info/licenses/LICENSE,sha256=Dqb07Ik2dV41s9nIdMUbiRWEfDqo7-dQeRiY7kPO8PE,3769
- cache_dit-0.2.28.dist-info/METADATA,sha256=03FPh4nIDfjSFMfkDz-sWr2g3l30UsQek8VjQ6TPn8g,23204
- cache_dit-0.2.28.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- cache_dit-0.2.28.dist-info/entry_points.txt,sha256=FX2gysXaZx6NeK1iCLMcIdP8Q4_qikkIHtEmi3oWn8o,65
- cache_dit-0.2.28.dist-info/top_level.txt,sha256=ZJDydonLEhujzz0FOkVbO-BqfzO9d_VqRHmZU-3MOZo,10
- cache_dit-0.2.28.dist-info/RECORD,,
+ cache_dit-0.2.29.dist-info/licenses/LICENSE,sha256=Dqb07Ik2dV41s9nIdMUbiRWEfDqo7-dQeRiY7kPO8PE,3769
+ cache_dit-0.2.29.dist-info/METADATA,sha256=wagdLaiAIX7fs5Rsw89DMsj5KQl1t3zGsTosKJe2AlQ,24540
+ cache_dit-0.2.29.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ cache_dit-0.2.29.dist-info/entry_points.txt,sha256=FX2gysXaZx6NeK1iCLMcIdP8Q4_qikkIHtEmi3oWn8o,65
+ cache_dit-0.2.29.dist-info/top_level.txt,sha256=ZJDydonLEhujzz0FOkVbO-BqfzO9d_VqRHmZU-3MOZo,10
+ cache_dit-0.2.29.dist-info/RECORD,,