make style
weak-kajuma committed Dec 21, 2024
1 parent 61faf92 commit 2fc8526
Showing 1 changed file with 0 additions and 8 deletions.
src/transformers/models/diffllama/modular_diffllama.py (8 changes: 0 additions & 8 deletions)
@@ -24,21 +24,15 @@

from ...cache_utils import Cache, StaticCache
from ...modeling_flash_attention_utils import _flash_attention_forward
from ...pytorch_utils import ALL_LAYERNORM_LAYERS
from ...utils import (
    is_flash_attn_greater_or_equal_2_10,
    logging,
)
from ..gemma.modeling_gemma import GemmaForCausalLM
from ..llama.modeling_llama import (
    LlamaDecoderLayer,
    LlamaForQuestionAnswering,
    LlamaForSequenceClassification,
    LlamaForTokenClassification,
    LlamaModel,
    LlamaPreTrainedModel,
    LlamaRMSNorm,
    LlamaRotaryEmbedding,
    apply_rotary_pos_emb,
    repeat_kv,
)
@@ -436,8 +430,6 @@ class DiffLlamaForTokenClassification(LlamaForTokenClassification):


__all__ = [
"DiffLlamaPreTrainedModel",
"DiffLlamaModel",
"DiffLlamaForCausalLM",
"DiffLlamaForSequenceClassification",
"DiffLlamaForQuestionAnswering",
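For context, the hunks above come from a "modular" model definition: the DiffLlama classes are declared by subclassing the corresponding Llama classes imported at the top of the file, and the exported names are collected in __all__. A minimal sketch of that pattern follows; only DiffLlamaForTokenClassification / LlamaForTokenClassification and the import path are taken from the diff, the class body and the trimmed-down __all__ are assumptions for illustration, not the file's actual contents.

    # Sketch of the modular-inheritance pattern visible in this diff (illustrative only).
    from ..llama.modeling_llama import LlamaForTokenClassification


    class DiffLlamaForTokenClassification(LlamaForTokenClassification):
        # Inherits the token-classification head and forward pass from Llama;
        # only DiffLlama-specific behavior (if any) would be overridden here.
        pass


    __all__ = ["DiffLlamaForTokenClassification"]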
