cache-dit 0.2.7__py3-none-any.whl → 0.2.8__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- cache_dit/_version.py +2 -2
- cache_dit/cache_factory/dual_block_cache/cache_context.py +0 -6
- {cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/METADATA +1 -1
- {cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/RECORD +8 -8
- {cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/WHEEL +0 -0
- {cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/entry_points.txt +0 -0
- {cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/licenses/LICENSE +0 -0
- {cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/top_level.txt +0 -0
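A diff like the one below can be reproduced locally by unpacking both wheels (a wheel is just a zip archive) and comparing their text members. The sketch that follows is illustrative only and is not part of cache-dit; the wheel paths are assumptions and should point at files you have downloaded yourself (for example with `pip download cache-dit==0.2.7 --no-deps`).

```python
# Rough sketch (not part of cache-dit): compare the text members of two
# wheels. The wheel paths below are assumptions -- point them at locally
# downloaded files.
import difflib
import zipfile

OLD_WHEEL = "cache_dit-0.2.7-py3-none-any.whl"  # assumed local path
NEW_WHEEL = "cache_dit-0.2.8-py3-none-any.whl"  # assumed local path


def wheel_texts(path):
    """Read every UTF-8 member of a wheel (a zip archive) into lines."""
    members = {}
    with zipfile.ZipFile(path) as zf:
        for name in zf.namelist():
            try:
                members[name] = zf.read(name).decode("utf-8").splitlines(keepends=True)
            except UnicodeDecodeError:
                pass  # skip binary members
    return members


old, new = wheel_texts(OLD_WHEEL), wheel_texts(NEW_WHEEL)
for name in sorted(set(old) | set(new)):
    diff = difflib.unified_diff(
        old.get(name, []), new.get(name, []), fromfile=name, tofile=name
    )
    print("".join(diff), end="")
```

Note that the `.dist-info` directory is renamed between versions, so this naive name-based pairing reports those members as whole-file removals and additions, whereas the registry diff above pairs them up.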
cache_dit/_version.py CHANGED

cache_dit/cache_factory/dual_block_cache/cache_context.py CHANGED
```diff
@@ -1162,7 +1162,6 @@ class DBCachedTransformerBlocks(torch.nn.Module):
 
         torch._dynamo.graph_break()
         if can_use_cache:
-            torch._dynamo.graph_break()
             add_cached_step()
             del Fn_hidden_states_residual
             hidden_states, encoder_hidden_states = apply_hidden_states_residual(
@@ -1189,7 +1188,6 @@ class DBCachedTransformerBlocks(torch.nn.Module):
                 )
             )
         else:
-            torch._dynamo.graph_break()
             set_Fn_buffer(Fn_hidden_states_residual, prefix="Fn_residual")
             if is_l1_diff_enabled():
                 # for hidden states L1 diff
@@ -1797,7 +1795,6 @@ class DBCachedTransformerBlocks(torch.nn.Module):
                 f"the number of single transformer blocks {len(self.single_transformer_blocks)}"
             )
 
-            torch._dynamo.graph_break()
             hidden_states = torch.cat(
                 [encoder_hidden_states, hidden_states], dim=1
             )
@@ -1829,13 +1826,11 @@ class DBCachedTransformerBlocks(torch.nn.Module):
                 ],
                 dim=1,
             )
-            torch._dynamo.graph_break()
         else:
             assert Bn_compute_blocks() <= len(self.transformer_blocks), (
                 f"Bn_compute_blocks {Bn_compute_blocks()} must be less than "
                 f"the number of transformer blocks {len(self.transformer_blocks)}"
             )
-            torch._dynamo.graph_break()
             if len(Bn_compute_blocks_ids()) > 0:
                 for i, block in enumerate(self._Bn_transformer_blocks()):
                     hidden_states, encoder_hidden_states = (
@@ -1864,7 +1859,6 @@ class DBCachedTransformerBlocks(torch.nn.Module):
                             encoder_hidden_states,
                             hidden_states,
                         )
-            torch._dynamo.graph_break()
 
         hidden_states = (
             hidden_states.reshape(-1)
```
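The only code change in cache_context.py for 0.2.8 is the removal of these six torch._dynamo.graph_break() calls. The diff itself does not state the motivation, but graph_break() forces torch.compile to end the current FX graph and resume tracing in a new one, so each call adds a graph split and the associated guard and dispatch overhead when the cached transformer blocks are compiled. The sketch below is illustrative only and uses a made-up counting backend, not anything from cache-dit; it shows how a single graph_break() turns one compiled graph into two.

```python
# Illustrative sketch (not from cache-dit): count how many FX graphs
# torch.compile produces with and without an explicit graph break.
import torch

graph_count = 0


def counting_backend(gm, example_inputs):
    # torch.compile invokes the backend once per captured FX graph.
    global graph_count
    graph_count += 1
    return gm.forward  # run the captured graph eagerly


def with_break(x):
    x = x + 1
    torch._dynamo.graph_break()  # Dynamo stops tracing here and resumes after
    return x * 2


def without_break(x):
    x = x + 1
    return x * 2


torch.compile(with_break, backend=counting_backend)(torch.randn(4))
print(graph_count)  # 2: one graph before the break, one after

graph_count = 0
torch._dynamo.reset()  # clear Dynamo caches so the second count starts from zero
torch.compile(without_break, backend=counting_backend)(torch.randn(4))
print(graph_count)  # 1: a single graph for the whole function
```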
{cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/RECORD CHANGED

```diff
@@ -1,12 +1,12 @@
 cache_dit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cache_dit/_version.py,sha256=
+cache_dit/_version.py,sha256=zkhRarrvPoGA1yWjS9_zVM80dWqpDesNn9DiHcF4JWM,511
 cache_dit/logger.py,sha256=0zsu42hN-3-rgGC_C29ms1IvVpV4_b4_SwJCKSenxBE,4304
 cache_dit/primitives.py,sha256=A2iG9YLot3gOsZSPp-_gyjqjLgJvWQRx8aitD4JQ23Y,3877
 cache_dit/cache_factory/__init__.py,sha256=5RNuhWakvvqrOV4vkqrEBA7d-V1LwcNSsjtW14mkqK8,5255
 cache_dit/cache_factory/taylorseer.py,sha256=LKSNo2ode69EVo9xrxjxAMEjz0yDGiGADeDYnEqddA8,3987
 cache_dit/cache_factory/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cache_dit/cache_factory/dual_block_cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cache_dit/cache_factory/dual_block_cache/cache_context.py,sha256=
+cache_dit/cache_factory/dual_block_cache/cache_context.py,sha256=itVEb6gT2eZuncAHUmP51ZS0r6v6cGtRvnPjyeXqKH8,71156
 cache_dit/cache_factory/dual_block_cache/diffusers_adapters/__init__.py,sha256=krNAICf-aS3JLmSG8vOB9tpLa04uYRcABsC8PMbVUKY,1870
 cache_dit/cache_factory/dual_block_cache/diffusers_adapters/cogvideox.py,sha256=3xUjvDzor9AkBkDUc0N7kZqM86MIdajuigesnicNzXE,2260
 cache_dit/cache_factory/dual_block_cache/diffusers_adapters/flux.py,sha256=cIsov6Pf0dRyddqkzTA2CU-jSDotof8LQr-HIoY9T9M,2615
@@ -38,9 +38,9 @@ cache_dit/metrics/config.py,sha256=ieOgD9ayz722RjVzk24bSIqS2D6o7TZjGk8KeXV-OLQ,5
 cache_dit/metrics/fid.py,sha256=9Ivtazl6mW0Bon2VXa-Ia5Xj2ewxRD3V1Qkd69zYM3Y,17066
 cache_dit/metrics/inception.py,sha256=pBVe2X6ylLPIXTG4-GWDM9DWnCviMJbJ45R3ulhktR0,12759
 cache_dit/metrics/metrics.py,sha256=tzAtG_-fM1xPIBfRVFIBupvOWYzIO3xDq29Vy5rOBWc,14730
-cache_dit-0.2.
-cache_dit-0.2.
-cache_dit-0.2.
-cache_dit-0.2.
-cache_dit-0.2.
-cache_dit-0.2.
+cache_dit-0.2.8.dist-info/licenses/LICENSE,sha256=Dqb07Ik2dV41s9nIdMUbiRWEfDqo7-dQeRiY7kPO8PE,3769
+cache_dit-0.2.8.dist-info/METADATA,sha256=8E51DpSKDGqk3_cG9buahoXN-7fub6M8VCiPb_Idg64,27608
+cache_dit-0.2.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+cache_dit-0.2.8.dist-info/entry_points.txt,sha256=FX2gysXaZx6NeK1iCLMcIdP8Q4_qikkIHtEmi3oWn8o,65
+cache_dit-0.2.8.dist-info/top_level.txt,sha256=ZJDydonLEhujzz0FOkVbO-BqfzO9d_VqRHmZU-3MOZo,10
+cache_dit-0.2.8.dist-info/RECORD,,
```
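The hashes in RECORD follow the wheel format: `sha256=` followed by the unpadded, URL-safe base64 encoding of the file's SHA-256 digest, then the file size in bytes. As a quick sanity check, a sketch along these lines (assuming a locally downloaded 0.2.8 wheel; the path is a placeholder) should reproduce the `cache_dit/_version.py` entry shown above.

```python
# Sketch (assumes a locally downloaded wheel; the path is a placeholder):
# recompute a RECORD entry as "path,sha256=<urlsafe-b64 digest>,<size>".
import base64
import hashlib
import zipfile

WHEEL = "cache_dit-0.2.8-py3-none-any.whl"  # assumed local path
MEMBER = "cache_dit/_version.py"

with zipfile.ZipFile(WHEEL) as zf:
    data = zf.read(MEMBER)

digest = hashlib.sha256(data).digest()
b64 = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
# Expected output (per the RECORD above):
# cache_dit/_version.py,sha256=zkhRarrvPoGA1yWjS9_zVM80dWqpDesNn9DiHcF4JWM,511
print(f"{MEMBER},sha256={b64},{len(data)}")
```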
{cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/WHEEL
File without changes

{cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/entry_points.txt
File without changes

{cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/licenses/LICENSE
File without changes

{cache_dit-0.2.7.dist-info → cache_dit-0.2.8.dist-info}/top_level.txt
File without changes