diff --git a/src/transformers/models/auto/configuration_auto.py b/src/transformers/models/auto/configuration_auto.py
index 69ce8efa10c76c..3e8da40c3aa7e4 100644
--- a/src/transformers/models/auto/configuration_auto.py
+++ b/src/transformers/models/auto/configuration_auto.py
@@ -1057,9 +1057,14 @@ def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
                 config_class = CONFIG_MAPPING[config_dict["model_type"]]
             except KeyError:
                 raise ValueError(
-                    f"The checkpoint you are trying to load has model type `{config_dict['model_type']}` "
+                    f"\nThe checkpoint you are trying to load has model type `{config_dict['model_type']}` "
                     "but Transformers does not recognize this architecture. This could be because of an "
-                    "issue with the checkpoint, or because your version of Transformers is out of date."
+                    "issue with the checkpoint, or because your version of Transformers is out of date.\n\n"
+                    "You can update transformers with the command `pip install --upgrade transformers`. If this "
+                    "does not work, and the checkpoint is very new, then there may not be a release version "
+                    "that supports this model yet. In this case, you can get the most up-to-date code by installing "
+                    "Transformers from source with the command "
+                    "`pip install git+https://github.com/huggingface/transformers.git`"
                 )
             return config_class.from_dict(config_dict, **unused_kwargs)
         else: