Add support for Magcache #12744
base: main
Changes from 11 commits
```diff
@@ -23,7 +23,13 @@
 _ATTENTION_CLASSES = (Attention, MochiAttention, AttentionModuleMixin)
 _FEEDFORWARD_CLASSES = (FeedForward, LuminaFeedForward)

-_SPATIAL_TRANSFORMER_BLOCK_IDENTIFIERS = ("blocks", "transformer_blocks", "single_transformer_blocks", "layers")
+_SPATIAL_TRANSFORMER_BLOCK_IDENTIFIERS = (
+    "blocks",
+    "transformer_blocks",
+    "single_transformer_blocks",
+    "layers",
+    "visual_transformer_blocks",
+)
 _TEMPORAL_TRANSFORMER_BLOCK_IDENTIFIERS = ("temporal_transformer_blocks",)
 _CROSS_TRANSFORMER_BLOCK_IDENTIFIERS = ("blocks", "transformer_blocks", "layers")
```

Member: For ZImage, I am guessing?

Contributor (Author): I think this change is not from my branch. I only appended "visual_transformer_blocks" to the list to support Kandinsky 5.0.
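For readers outside the thread: identifier tuples like `_SPATIAL_TRANSFORMER_BLOCK_IDENTIFIERS` are matched against attribute names on a transformer model to find its lists of blocks, which is why Kandinsky 5.0's `visual_transformer_blocks` attribute has to appear here. Below is a minimal sketch of that kind of lookup; the helper name `find_spatial_block_lists` and the exact `getattr`-based logic are illustrative assumptions, not the hook code in this PR.

```python
# Illustrative sketch only: how an identifier tuple such as
# _SPATIAL_TRANSFORMER_BLOCK_IDENTIFIERS can be used to discover the
# transformer-block ModuleLists on an arbitrary denoiser model.
import torch.nn as nn

_SPATIAL_TRANSFORMER_BLOCK_IDENTIFIERS = (
    "blocks",
    "transformer_blocks",
    "single_transformer_blocks",
    "layers",
    "visual_transformer_blocks",  # appended in this PR so Kandinsky 5.0 blocks are found
)


def find_spatial_block_lists(model: nn.Module):
    """Return (attribute_name, block_list) pairs for every identifier present on the model."""
    found = []
    for name in _SPATIAL_TRANSFORMER_BLOCK_IDENTIFIERS:
        candidate = getattr(model, name, None)
        if isinstance(candidate, nn.ModuleList):
            found.append((name, candidate))
    return found
```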
```diff
@@ -26,6 +26,7 @@ class AttentionProcessorMetadata:
 class TransformerBlockMetadata:
     return_hidden_states_index: int = None
     return_encoder_hidden_states_index: int = None
+    hidden_states_argument_name: str = "hidden_states"

     _cls: Type = None
     _cached_parameter_indices: Dict[str, int] = None
```
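The new `hidden_states_argument_name` field exists because not every block calls its main input `hidden_states`; Kandinsky 5.0's decoder block takes `visual_embed` (registered further down). Here is a minimal sketch of how a hook could resolve the tensor by that name, assuming a fallback lookup through the forward signature; `get_hidden_states` is a hypothetical helper, not this PR's code.

```python
# Hypothetical sketch: pull the "hidden states" tensor out of a block call
# using the per-block metadata, so a caching/skip hook still works when the
# block names that argument differently (e.g. Kandinsky 5.0's "visual_embed").
import inspect
from dataclasses import dataclass


@dataclass
class TransformerBlockMetadata:
    return_hidden_states_index: int = None
    return_encoder_hidden_states_index: int = None
    hidden_states_argument_name: str = "hidden_states"


def get_hidden_states(block_forward, metadata: TransformerBlockMetadata, args, kwargs):
    """Resolve the main hidden-states tensor from a block call's args/kwargs."""
    name = metadata.hidden_states_argument_name
    if name in kwargs:
        return kwargs[name]
    # Fall back to a positional lookup via the forward signature.
    params = [p for p in inspect.signature(block_forward).parameters if p != "self"]
    return args[params.index(name)]
```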
```diff
@@ -169,7 +170,7 @@ def _register_attention_processors_metadata():


 def _register_transformer_blocks_metadata():
-    from ..models.attention import BasicTransformerBlock
+    from ..models.attention import BasicTransformerBlock, JointTransformerBlock
     from ..models.transformers.cogvideox_transformer_3d import CogVideoXBlock
     from ..models.transformers.transformer_bria import BriaTransformerBlock
     from ..models.transformers.transformer_cogview4 import CogView4TransformerBlock
@@ -184,6 +185,7 @@ def _register_transformer_blocks_metadata():
         HunyuanImageSingleTransformerBlock,
         HunyuanImageTransformerBlock,
     )
+    from ..models.transformers.transformer_kandinsky import Kandinsky5TransformerDecoderBlock
     from ..models.transformers.transformer_ltx import LTXVideoTransformerBlock
     from ..models.transformers.transformer_mochi import MochiTransformerBlock
     from ..models.transformers.transformer_qwenimage import QwenImageTransformerBlock
@@ -331,6 +333,24 @@ def _register_transformer_blocks_metadata():
         ),
     )

+    TransformerBlockRegistry.register(
+        model_class=JointTransformerBlock,
+        metadata=TransformerBlockMetadata(
+            return_hidden_states_index=1,
+            return_encoder_hidden_states_index=0,
+        ),
+    )
+
```

Comment on lines +336 to +342:

Member: For SD3, I am guessing?

Contributor (Author): Yes.
```diff
+    # Kandinsky 5.0 (Kandinsky5TransformerDecoderBlock)
+    TransformerBlockRegistry.register(
+        model_class=Kandinsky5TransformerDecoderBlock,
+        metadata=TransformerBlockMetadata(
+            return_hidden_states_index=0,
+            return_encoder_hidden_states_index=None,
+            hidden_states_argument_name="visual_embed",
+        ),
+    )
+

 # fmt: off
 def _skip_attention___ret___hidden_states(self, *args, **kwargs):
```
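As a rough illustration of why MagCache-style hooks need this metadata, here is a hedged sketch of a skip path that uses `return_hidden_states_index`, `return_encoder_hidden_states_index`, and `hidden_states_argument_name` to return pass-through values in the shape each block normally returns; `maybe_skip_block` and its wiring are assumptions, not the implementation in this PR.

```python
# Illustrative sketch, not the PR's hook implementation: wrap a registered
# transformer block and use its metadata to return inputs in the right
# order/argument when the block is skipped.
from dataclasses import dataclass
from typing import Optional


@dataclass
class TransformerBlockMetadata:
    return_hidden_states_index: Optional[int] = None
    return_encoder_hidden_states_index: Optional[int] = None
    hidden_states_argument_name: str = "hidden_states"


def maybe_skip_block(block, metadata: TransformerBlockMetadata, should_skip, *args, **kwargs):
    if not should_skip:
        return block(*args, **kwargs)

    # When skipping, pass the inputs through unchanged, shaped like the
    # block's real return value so downstream code keeps working.
    hidden_states = kwargs.get(metadata.hidden_states_argument_name, args[0] if args else None)
    encoder_hidden_states = kwargs.get("encoder_hidden_states")

    if metadata.return_encoder_hidden_states_index is None:
        # e.g. Kandinsky5TransformerDecoderBlock: single tensor return
        return hidden_states

    # e.g. JointTransformerBlock: (encoder_hidden_states, hidden_states)
    outputs = [None, None]
    outputs[metadata.return_hidden_states_index] = hidden_states
    outputs[metadata.return_encoder_hidden_states_index] = encoder_hidden_states
    return tuple(outputs)
```

With the registrations above, the same hook logic covers SD3's JointTransformerBlock (tuple return with the encoder states first) and Kandinsky 5.0's decoder block (single tensor return, differently named input).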