diff --git a/optimum/exporters/tasks.py b/optimum/exporters/tasks.py
index 0a3758e97cf..5ed0a2c6e22 100644
--- a/optimum/exporters/tasks.py
+++ b/optimum/exporters/tasks.py
@@ -1790,7 +1790,7 @@ def infer_task_from_model(
                 cache_dir=cache_dir,
                 token=token,
             )
-        elif type(model) == type:
+        elif isinstance(model, type):
             inferred_task_name = cls._infer_task_from_model_or_model_class(model_class=model)
         else:
             inferred_task_name = cls._infer_task_from_model_or_model_class(model=model)
@@ -1944,7 +1944,7 @@ def infer_library_from_model(
                 cache_dir=cache_dir,
                 token=token,
             )
-        elif type(model) == type:
+        elif isinstance(model, type):
             library_name = cls._infer_library_from_model_or_model_class(model_class=model)
         else:
             library_name = cls._infer_library_from_model_or_model_class(model=model)
diff --git a/optimum/onnxruntime/runs/__init__.py b/optimum/onnxruntime/runs/__init__.py
index d21db2a4aca..1d982949344 100644
--- a/optimum/onnxruntime/runs/__init__.py
+++ b/optimum/onnxruntime/runs/__init__.py
@@ -110,9 +110,9 @@ def __init__(self, run_config):
         model_class = FeaturesManager.get_model_class_for_feature(get_autoclass_name(self.task))
         self.torch_model = model_class.from_pretrained(run_config["model_name_or_path"])

-        self.return_body["model_type"] = (
-            self.torch_model.config.model_type
-        )  # return_body is initialized in parent class
+        self.return_body[
+            "model_type"
+        ] = self.torch_model.config.model_type  # return_body is initialized in parent class

     def _launch_time(self, trial):
         batch_size = trial.suggest_categorical("batch_size", self.batch_sizes)