diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py
index 683b519a28cd1e..ce0086d1e3bcd8 100755
--- a/src/transformers/modeling_utils.py
+++ b/src/transformers/modeling_utils.py
@@ -344,10 +344,15 @@ def check_support_param_buffer_assignment(model_to_load, state_dict, start_prefi
     as when loading in empty weights) by first checking
     if the model explicitly disables it, then by ensuring that the state dict keys
     are a subset of the model's parameters.
+
+    Note: We fully disable this if we are using `deepspeed`
     """
     if len([key for key in state_dict if key.startswith(start_prefix)]) == 0:
         return False

+    if is_deepspeed_zero3_enabled():
+        return False
+
     # Some models explicitly do not support param buffer assignment
     if not getattr(model_to_load, "_supports_param_buffer_assignment", True):
         logger.debug(
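For context, here is a minimal sketch of how a caller might consume this check, assuming PyTorch's `load_state_dict(..., assign=...)` keyword (available in torch >= 2.1). The `load_weights` helper below is hypothetical and is not the library's actual call site; it only illustrates why the new ZeRO-3 guard matters:

```python
# Hypothetical caller sketch; `load_weights` is illustrative, not the real
# transformers call site. With this change, the check returns False under
# DeepSpeed ZeRO-3, so loading falls back to the copy-based path.
import torch.nn as nn

from transformers.modeling_utils import check_support_param_buffer_assignment


def load_weights(model: nn.Module, state_dict: dict, start_prefix: str = "") -> None:
    assign = check_support_param_buffer_assignment(model, state_dict, start_prefix)
    # assign=True swaps state-dict tensors directly into the module (fast,
    # avoids copies, but conflicts with ZeRO-3, which partitions parameters
    # across ranks); assign=False copies values into the existing tensors,
    # which is the safe behavior when parameters are sharded.
    model.load_state_dict(state_dict, strict=False, assign=assign)
```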