From f223f5e330b678767c76489b3c30154d6959c80c Mon Sep 17 00:00:00 2001
From: fxmarty <9808326+fxmarty@users.noreply.github.com>
Date: Wed, 25 Oct 2023 10:40:36 +0200
Subject: [PATCH] Fix custom architecture detection in onnx export (#1472)

fix custom architecture detection in onnx export

Co-authored-by: baskrahmer
---
 optimum/exporters/onnx/__main__.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/optimum/exporters/onnx/__main__.py b/optimum/exporters/onnx/__main__.py
index 851be1b8f6f..df5c2498eff 100644
--- a/optimum/exporters/onnx/__main__.py
+++ b/optimum/exporters/onnx/__main__.py
@@ -338,6 +338,7 @@ def main_export(
             f"The task could not be automatically inferred as this is available only for models hosted on the Hugging Face Hub. Please provide the argument --task with the relevant task from {', '.join(TasksManager.get_all_tasks())}. Detailed error: {e}"
         )
 
+    custom_architecture = False
     if library_name == "transformers":
         config = AutoConfig.from_pretrained(
             model_name_or_path,
@@ -378,7 +379,6 @@ def main_export(
             library_name=library_name,
         )
 
-    custom_architecture = False
     is_stable_diffusion = "stable-diffusion" in task
     model_type = "stable-diffusion" if is_stable_diffusion else model.config.model_type.replace("_", "-")
 
@@ -393,8 +393,6 @@ def main_export(
             f"{model_type} is not supported yet. Only {list(TasksManager._SUPPORTED_CLI_MODEL_TYPE.keys())} are supported. "
             f"If you want to support {model_type} please propose a PR or open up an issue."
         )
-    if model.config.model_type.replace("_", "-") not in TasksManager._SUPPORTED_MODEL_TYPE:
-        custom_architecture = True
 
     # TODO: support onnx_config.py in the model repo
     if custom_architecture and custom_onnx_configs is None: