mirror of
https://huggingface.co/deepseek-ai/DeepSeek-R1-Zero
synced 2026-03-10 21:16:30 +01:00
Remove unavailable import
The Transformers codebase doesn't really support Torch 1.x anymore, so the compatibility checks have been removed in the most recent versions!
This commit is contained in:
parent
d9c339cd7d
commit
03f6b95966
@@ -43,7 +43,6 @@ from transformers.modeling_outputs import (
 from transformers.modeling_utils import PreTrainedModel
 from transformers.pytorch_utils import (
     ALL_LAYERNORM_LAYERS,
-    is_torch_greater_or_equal_than_1_13,
 )
 from transformers.utils import (
     add_start_docstrings,
@@ -66,9 +65,6 @@ if is_flash_attn_2_available():
 # This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph.
 # It means that the function will not be traced through and simply appear as a node in the graph.
 if is_torch_fx_available():
-    if not is_torch_greater_or_equal_than_1_13:
-        import torch.fx
-
     _prepare_4d_causal_attention_mask = torch.fx.wrap(_prepare_4d_causal_attention_mask)
Loading…
x
Reference in New Issue
Block a user