Remove unavailable import

The Transformers codebase no longer supports Torch 1.x, so the compatibility checks have been removed in the most recent versions.
This commit is contained in:
Matthew Carrigan 2025-01-27 23:40:48 +00:00 committed by system
parent d9c339cd7d
commit 03f6b95966
No known key found for this signature in database
GPG Key ID: 6A528E38E0733467

View File

@ -43,7 +43,6 @@ from transformers.modeling_outputs import (
from transformers.modeling_utils import PreTrainedModel
from transformers.pytorch_utils import (
ALL_LAYERNORM_LAYERS,
is_torch_greater_or_equal_than_1_13,
)
from transformers.utils import (
add_start_docstrings,
@ -66,9 +65,6 @@ if is_flash_attn_2_available():
# This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph.
# It means that the function will not be traced through and simply appear as a node in the graph.
if is_torch_fx_available():
if not is_torch_greater_or_equal_than_1_13:
import torch.fx
_prepare_4d_causal_attention_mask = torch.fx.wrap(_prepare_4d_causal_attention_mask)