Skip to content

Commit

Permalink
Merge pull request #11 from HaoyangLee/hunyuanvideo_dd
Browse files Browse the repository at this point in the history
call mindone.transformers, use LlamaFlashAttention2 as FA
  • Loading branch information
wtomin authored Feb 11, 2025
2 parents 316bb8c + a1fc58c commit 7035e2a
Show file tree
Hide file tree
Showing 10 changed files with 5 additions and 1,673 deletions.
11 changes: 4 additions & 7 deletions examples/hunyuanvideo/hyvideo/text_encoder/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
import os
import sys
from dataclasses import dataclass
from typing import Optional, Tuple

Expand All @@ -10,12 +8,11 @@
import mindspore as ms
from mindspore import Tensor, nn

from mindone.transformers import CLIPTextModel
from mindone.transformers import CLIPTextModel, LlamaModel
from mindone.transformers.models.llama.modeling_llama import ALL_LAYERNORM_LAYERS
from mindone.utils.amp import auto_mixed_precision

from ..constants import PRECISION_TO_TYPE, TEXT_ENCODER_PATH, TOKENIZER_PATH
from .transformers import LlamaModel
from .transformers.models.llama.modeling_llama import ALL_LAYERNORM_LAYERS
from ..constants import PRECISION_TO_TYPE, TEXT_ENCODER_PATH, TOKENIZER_PATH


def use_default(value, default):
Expand All @@ -36,7 +33,7 @@ def load_text_encoder(
text_encoder = CLIPTextModel.from_pretrained(text_encoder_path)
text_encoder.final_layer_norm = text_encoder.text_model.final_layer_norm
elif text_encoder_type == "llm":
text_encoder = LlamaModel.from_pretrained(text_encoder_path)
text_encoder = LlamaModel.from_pretrained(text_encoder_path, use_flash_attention_2=True)
text_encoder.final_layer_norm = text_encoder.norm
else:
raise ValueError(f"Unsupported text encoder type: {text_encoder_type}")
Expand Down

This file was deleted.

281 changes: 0 additions & 281 deletions examples/hunyuanvideo/hyvideo/text_encoder/transformers/cache_utils.py

This file was deleted.

This file was deleted.

Loading

0 comments on commit 7035e2a

Please sign in to comment.