From 7d9d5cea551b7ac5a6222c0b32a4c269412f7b9a Mon Sep 17 00:00:00 2001
From: "Hz, Ji"
Date: Mon, 8 Jan 2024 19:33:58 +0800
Subject: [PATCH] remove two deprecated function (#28220)

---
 src/transformers/utils/__init__.py     |  2 --
 src/transformers/utils/import_utils.py | 36 +-------------------------
 2 files changed, 1 insertion(+), 37 deletions(-)

diff --git a/src/transformers/utils/__init__.py b/src/transformers/utils/__init__.py
index ff2723473f1..780090aec5e 100644
--- a/src/transformers/utils/__init__.py
+++ b/src/transformers/utils/__init__.py
@@ -194,9 +194,7 @@
     is_training_run_on_sagemaker,
     is_vision_available,
     requires_backends,
-    tf_required,
     torch_only_method,
-    torch_required,
 )
 from .peft_utils import (
     ADAPTER_CONFIG_NAME,
diff --git a/src/transformers/utils/import_utils.py b/src/transformers/utils/import_utils.py
index eda34597a39..909965d0306 100644
--- a/src/transformers/utils/import_utils.py
+++ b/src/transformers/utils/import_utils.py
@@ -24,7 +24,7 @@
 import sys
 import warnings
 from collections import OrderedDict
-from functools import lru_cache, wraps
+from functools import lru_cache
 from itertools import chain
 from types import ModuleType
 from typing import Any, Tuple, Union
@@ -1303,40 +1303,6 @@ def __getattribute__(cls, key):
         requires_backends(cls, cls._backends)
 
 
-def torch_required(func):
-    warnings.warn(
-        "The method `torch_required` is deprecated and will be removed in v4.36. Use `requires_backends` instead.",
-        FutureWarning,
-    )
-
-    # Chose a different decorator name than in tests so it's clear they are not the same.
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        if is_torch_available():
-            return func(*args, **kwargs)
-        else:
-            raise ImportError(f"Method `{func.__name__}` requires PyTorch.")
-
-    return wrapper
-
-
-def tf_required(func):
-    warnings.warn(
-        "The method `tf_required` is deprecated and will be removed in v4.36. Use `requires_backends` instead.",
-        FutureWarning,
-    )
-
-    # Chose a different decorator name than in tests so it's clear they are not the same.
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        if is_tf_available():
-            return func(*args, **kwargs)
-        else:
-            raise ImportError(f"Method `{func.__name__}` requires TF.")
-
-    return wrapper
-
-
 def is_torch_fx_proxy(x):
     if is_torch_fx_available():
         import torch.fx